joins;
+ private SelectStatement subquery;
+}
diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBase.interp b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBase.interp
deleted file mode 100644
index b46bdc67ef..0000000000
--- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBase.interp
+++ /dev/null
@@ -1,55 +0,0 @@
-token literal names:
-null
-'.'
-null
-null
-'TOPICS'
-'CONSUMERS'
-null
-null
-null
-null
-null
-null
-null
-null
-'/**/'
-null
-null
-
-token symbolic names:
-null
-null
-SHOW
-PATHS
-TOPICS
-CONSUMERS
-DATABASES
-TABLES
-SELECT
-FROM
-STRING
-IDENTIFIER
-BACKQUOTED_IDENTIFIER
-SIMPLE_COMMENT
-BRACKETED_EMPTY_COMMENT
-BRACKETED_COMMENT
-WS
-
-rule names:
-singleStatement
-statement
-kafkaQueryTopicStatement
-kafkaQueryConsumerStatement
-kafkaQueryStatement
-kafkaStatement
-childPathStatement
-columnStatement
-fromClause
-tableName
-identifier
-quotedIdentifier
-
-
-atn:
-[4, 1, 16, 92, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 1, 0, 5, 0, 26, 8, 0, 10, 0, 12, 0, 29, 9, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 38, 8, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 44, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 56, 8, 3, 1, 4, 1, 4, 3, 4, 60, 8, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 3, 6, 67, 8, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 5, 9, 77, 8, 9, 10, 9, 12, 9, 80, 9, 9, 1, 10, 1, 10, 1, 10, 5, 10, 85, 8, 10, 10, 10, 12, 10, 88, 9, 10, 1, 11, 1, 11, 1, 11, 0, 0, 12, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 0, 0, 92, 0, 27, 1, 0, 0, 0, 2, 37, 1, 0, 0, 0, 4, 43, 1, 0, 0, 0, 6, 55, 1, 0, 0, 0, 8, 59, 1, 0, 0, 0, 10, 61, 1, 0, 0, 0, 12, 66, 1, 0, 0, 0, 14, 68, 1, 0, 0, 0, 16, 70, 1, 0, 0, 0, 18, 73, 1, 0, 0, 0, 20, 86, 1, 0, 0, 0, 22, 89, 1, 0, 0, 0, 24, 26, 3, 2, 1, 0, 25, 24, 1, 0, 0, 0, 26, 29, 1, 0, 0, 0, 27, 25, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 1, 1, 0, 0, 0, 29, 27, 1, 0, 0, 0, 30, 31, 5, 2, 0, 0, 31, 38, 3, 12, 6, 0, 32, 33, 5, 8, 0, 0, 33, 34, 3, 14, 7, 0, 34, 35, 3, 16, 8, 0, 35, 38, 1, 0, 0, 0, 36, 38, 3, 10, 5, 0, 37, 30, 1, 0, 0, 0, 37, 32, 1, 0, 0, 0, 37, 36, 1, 0, 0, 0, 38, 3, 1, 0, 0, 0, 39, 40, 5, 2, 0, 0, 40, 44, 5, 4, 0, 0, 41, 42, 5, 2, 0, 0, 42, 44, 5, 6, 0, 0, 43, 39, 1, 0, 0, 0, 43, 41, 1, 0, 0, 0, 44, 5, 1, 0, 0, 0, 45, 46, 5, 2, 0, 0, 46, 56, 5, 5, 0, 0, 47, 48, 5, 2, 0, 0, 48, 49, 5, 5, 0, 0, 49, 56, 3, 16, 8, 0, 50, 51, 5, 2, 0, 0, 51, 56, 5, 7, 0, 0, 52, 53, 5, 2, 0, 0, 53, 54, 5, 7, 0, 0, 54, 56, 3, 16, 8, 0, 55, 45, 1, 0, 0, 0, 55, 47, 1, 0, 0, 0, 55, 50, 1, 0, 0, 0, 55, 52, 1, 0, 0, 0, 56, 7, 1, 0, 0, 0, 57, 60, 3, 4, 2, 0, 58, 60, 3, 6, 3, 0, 59, 57, 1, 0, 0, 0, 59, 58, 1, 0, 0, 0, 60, 9, 1, 0, 0, 0, 61, 62, 3, 8, 4, 0, 62, 11, 1, 0, 0, 0, 63, 67, 5, 3, 0, 0, 64, 65, 5, 3, 0, 0, 65, 67, 3, 16, 8, 0, 66, 63, 1, 0, 0, 0, 66, 64, 1, 0, 0, 0, 67, 13, 1, 0, 0, 0, 68, 69, 3, 20, 10, 0, 69, 15, 1, 0, 0, 0, 70, 71, 5, 9, 0, 0, 71, 72, 3, 18, 9, 0, 72, 17, 1, 0, 0, 0, 73, 78, 3, 20, 10, 0, 74, 75, 5, 1, 0, 0, 75, 77, 3, 20, 10, 0, 76, 74, 1, 0, 0, 0, 77, 80, 1, 0, 0, 0, 78, 76, 1, 0, 0, 0, 78, 79, 1, 0, 0, 0, 79, 19, 1, 0, 0, 0, 80, 78, 1, 0, 0, 0, 81, 85, 5, 11, 0, 0, 82, 85, 5, 10, 0, 0, 83, 85, 3, 22, 11, 0, 84, 81, 1, 0, 0, 0, 84, 82, 1, 0, 0, 0, 84, 83, 1, 0, 0, 0, 85, 88, 1, 0, 0, 0, 86, 84, 1, 0, 0, 0, 86, 87, 1, 0, 0, 0, 87, 21, 1, 0, 0, 0, 88, 86, 1, 0, 0, 0, 89, 90, 5, 12, 0, 0, 90, 23, 1, 0, 0, 0, 9, 27, 37, 43, 55, 59, 66, 78, 84, 86]
\ No newline at end of file
diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseBaseListener.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseBaseListener.java
deleted file mode 100644
index 65b505500d..0000000000
--- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseBaseListener.java
+++ /dev/null
@@ -1,184 +0,0 @@
-// Generated from io/edurt/datacap/sql/parser/SqlBase.g4 by ANTLR 4.12.0
-package io.edurt.datacap.sql.parser;
-
-import org.antlr.v4.runtime.ParserRuleContext;
-import org.antlr.v4.runtime.tree.ErrorNode;
-import org.antlr.v4.runtime.tree.TerminalNode;
-
-/**
- * This class provides an empty implementation of {@link SqlBaseListener},
- * which can be extended to create a listener which only needs to handle a subset
- * of the available methods.
- */
-@SuppressWarnings("CheckReturnValue")
-public class SqlBaseBaseListener implements SqlBaseListener {
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterSingleStatement(SqlBaseParser.SingleStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitSingleStatement(SqlBaseParser.SingleStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterStatement(SqlBaseParser.StatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitStatement(SqlBaseParser.StatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterKafkaQueryTopicStatement(SqlBaseParser.KafkaQueryTopicStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitKafkaQueryTopicStatement(SqlBaseParser.KafkaQueryTopicStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterKafkaQueryConsumerStatement(SqlBaseParser.KafkaQueryConsumerStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitKafkaQueryConsumerStatement(SqlBaseParser.KafkaQueryConsumerStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterKafkaQueryStatement(SqlBaseParser.KafkaQueryStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitKafkaQueryStatement(SqlBaseParser.KafkaQueryStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterKafkaStatement(SqlBaseParser.KafkaStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitKafkaStatement(SqlBaseParser.KafkaStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterChildPathStatement(SqlBaseParser.ChildPathStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitChildPathStatement(SqlBaseParser.ChildPathStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterColumnStatement(SqlBaseParser.ColumnStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitColumnStatement(SqlBaseParser.ColumnStatementContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterFromClause(SqlBaseParser.FromClauseContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitFromClause(SqlBaseParser.FromClauseContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterTableName(SqlBaseParser.TableNameContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitTableName(SqlBaseParser.TableNameContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterIdentifier(SqlBaseParser.IdentifierContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitIdentifier(SqlBaseParser.IdentifierContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx) { }
-
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterEveryRule(ParserRuleContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitEveryRule(ParserRuleContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void visitTerminal(TerminalNode node) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void visitErrorNode(ErrorNode node) { }
-}
\ No newline at end of file
diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseBaseVisitor.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseBaseVisitor.java
deleted file mode 100644
index 0b54b22aa2..0000000000
--- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseBaseVisitor.java
+++ /dev/null
@@ -1,99 +0,0 @@
-// Generated from io/edurt/datacap/sql/parser/SqlBase.g4 by ANTLR 4.12.0
-package io.edurt.datacap.sql.parser;
-import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;
-
-/**
- * This class provides an empty implementation of {@link SqlBaseVisitor},
- * which can be extended to create a visitor which only needs to handle a subset
- * of the available methods.
- *
- * @param <T> The return type of the visit operation. Use {@link Void} for
- * operations with no return type.
- */
-@SuppressWarnings("CheckReturnValue")
-public class SqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements SqlBaseVisitor<T> {
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitSingleStatement(SqlBaseParser.SingleStatementContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitStatement(SqlBaseParser.StatementContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitKafkaQueryTopicStatement(SqlBaseParser.KafkaQueryTopicStatementContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitKafkaQueryConsumerStatement(SqlBaseParser.KafkaQueryConsumerStatementContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitKafkaQueryStatement(SqlBaseParser.KafkaQueryStatementContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitKafkaStatement(SqlBaseParser.KafkaStatementContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitChildPathStatement(SqlBaseParser.ChildPathStatementContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitColumnStatement(SqlBaseParser.ColumnStatementContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitFromClause(SqlBaseParser.FromClauseContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitTableName(SqlBaseParser.TableNameContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitIdentifier(SqlBaseParser.IdentifierContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx) { return visitChildren(ctx); }
-}
\ No newline at end of file
diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseLexer.interp b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseLexer.interp
deleted file mode 100644
index d8822ced22..0000000000
--- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseLexer.interp
+++ /dev/null
@@ -1,67 +0,0 @@
-token literal names:
-null
-'.'
-null
-null
-'TOPICS'
-'CONSUMERS'
-null
-null
-null
-null
-null
-null
-null
-null
-'/**/'
-null
-null
-
-token symbolic names:
-null
-null
-SHOW
-PATHS
-TOPICS
-CONSUMERS
-DATABASES
-TABLES
-SELECT
-FROM
-STRING
-IDENTIFIER
-BACKQUOTED_IDENTIFIER
-SIMPLE_COMMENT
-BRACKETED_EMPTY_COMMENT
-BRACKETED_COMMENT
-WS
-
-rule names:
-T__0
-SHOW
-PATHS
-TOPICS
-CONSUMERS
-DATABASES
-TABLES
-SELECT
-FROM
-DIGIT
-LETTER
-STRING
-IDENTIFIER
-BACKQUOTED_IDENTIFIER
-SIMPLE_COMMENT
-BRACKETED_EMPTY_COMMENT
-BRACKETED_COMMENT
-WS
-
-channel names:
-DEFAULT_TOKEN_CHANNEL
-HIDDEN
-
-mode names:
-DEFAULT_MODE
-
-atn:
-[4, 0, 16, 187, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 105, 8, 11, 10, 11, 12, 11, 108, 9, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 115, 8, 11, 10, 11, 12, 11, 118, 9, 11, 1, 11, 1, 11, 3, 11, 122, 8, 11, 1, 12, 1, 12, 1, 12, 4, 12, 127, 8, 12, 11, 12, 12, 12, 128, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 135, 8, 13, 10, 13, 12, 13, 138, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 146, 8, 14, 10, 14, 12, 14, 149, 9, 14, 1, 14, 3, 14, 152, 8, 14, 1, 14, 3, 14, 155, 8, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 171, 8, 16, 10, 16, 12, 16, 174, 9, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 4, 17, 182, 8, 17, 11, 17, 12, 17, 183, 1, 17, 1, 17, 1, 172, 0, 18, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 0, 21, 0, 23, 10, 25, 11, 27, 12, 29, 13, 31, 14, 33, 15, 35, 16, 1, 0, 24, 2, 0, 83, 83, 115, 115, 2, 0, 72, 72, 104, 104, 2, 0, 79, 79, 111, 111, 2, 0, 87, 87, 119, 119, 2, 0, 80, 80, 112, 112, 2, 0, 65, 65, 97, 97, 2, 0, 84, 84, 116, 116, 2, 0, 68, 68, 100, 100, 2, 0, 66, 66, 98, 98, 2, 0, 69, 69, 115, 115, 2, 0, 76, 76, 108, 108, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 70, 70, 102, 102, 2, 0, 82, 82, 114, 114, 2, 0, 77, 77, 109, 109, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 2, 0, 39, 39, 92, 92, 2, 0, 34, 34, 92, 92, 1, 0, 96, 96, 2, 0, 10, 10, 13, 13, 1, 0, 43, 43, 3, 0, 9, 10, 13, 13, 32, 32, 200, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 1, 37, 1, 0, 0, 0, 3, 39, 1, 0, 0, 0, 5, 44, 1, 0, 0, 0, 7, 50, 1, 0, 0, 0, 9, 57, 1, 0, 0, 0, 11, 67, 1, 0, 0, 0, 13, 77, 1, 0, 0, 0, 15, 84, 1, 0, 0, 0, 17, 91, 1, 0, 0, 0, 19, 96, 1, 0, 0, 0, 21, 98, 1, 0, 0, 0, 23, 121, 1, 0, 0, 0, 25, 126, 1, 0, 0, 0, 27, 130, 1, 0, 0, 0, 29, 141, 1, 0, 0, 0, 31, 158, 1, 0, 0, 0, 33, 165, 1, 0, 0, 0, 35, 181, 1, 0, 0, 0, 37, 38, 5, 46, 0, 0, 38, 2, 1, 0, 0, 0, 39, 40, 7, 0, 0, 0, 40, 41, 7, 1, 0, 0, 41, 42, 7, 2, 0, 0, 42, 43, 7, 3, 0, 0, 43, 4, 1, 0, 0, 0, 44, 45, 7, 4, 0, 0, 45, 46, 7, 5, 0, 0, 46, 47, 7, 6, 0, 0, 47, 48, 7, 1, 0, 0, 48, 49, 7, 0, 0, 0, 49, 6, 1, 0, 0, 0, 50, 51, 5, 84, 0, 0, 51, 52, 5, 79, 0, 0, 52, 53, 5, 80, 0, 0, 53, 54, 5, 73, 0, 0, 54, 55, 5, 67, 0, 0, 55, 56, 5, 83, 0, 0, 56, 8, 1, 0, 0, 0, 57, 58, 5, 67, 0, 0, 58, 59, 5, 79, 0, 0, 59, 60, 5, 78, 0, 0, 60, 61, 5, 83, 0, 0, 61, 62, 5, 85, 0, 0, 62, 63, 5, 77, 0, 0, 63, 64, 5, 69, 0, 0, 64, 65, 5, 82, 0, 0, 65, 66, 5, 83, 0, 0, 66, 10, 1, 0, 0, 0, 67, 68, 7, 7, 0, 0, 68, 69, 7, 5, 0, 0, 69, 70, 7, 6, 0, 0, 70, 71, 7, 5, 0, 0, 71, 72, 7, 8, 0, 0, 72, 73, 7, 5, 0, 0, 73, 74, 7, 0, 0, 0, 74, 75, 7, 9, 0, 0, 75, 76, 7, 0, 0, 0, 76, 12, 1, 0, 0, 0, 77, 78, 7, 6, 0, 0, 78, 79, 7, 5, 0, 0, 
79, 80, 7, 8, 0, 0, 80, 81, 7, 10, 0, 0, 81, 82, 7, 11, 0, 0, 82, 83, 7, 0, 0, 0, 83, 14, 1, 0, 0, 0, 84, 85, 7, 0, 0, 0, 85, 86, 7, 11, 0, 0, 86, 87, 7, 10, 0, 0, 87, 88, 7, 11, 0, 0, 88, 89, 7, 12, 0, 0, 89, 90, 7, 6, 0, 0, 90, 16, 1, 0, 0, 0, 91, 92, 7, 13, 0, 0, 92, 93, 7, 14, 0, 0, 93, 94, 7, 2, 0, 0, 94, 95, 7, 15, 0, 0, 95, 18, 1, 0, 0, 0, 96, 97, 7, 16, 0, 0, 97, 20, 1, 0, 0, 0, 98, 99, 7, 17, 0, 0, 99, 22, 1, 0, 0, 0, 100, 106, 5, 39, 0, 0, 101, 105, 8, 18, 0, 0, 102, 103, 5, 92, 0, 0, 103, 105, 9, 0, 0, 0, 104, 101, 1, 0, 0, 0, 104, 102, 1, 0, 0, 0, 105, 108, 1, 0, 0, 0, 106, 104, 1, 0, 0, 0, 106, 107, 1, 0, 0, 0, 107, 109, 1, 0, 0, 0, 108, 106, 1, 0, 0, 0, 109, 122, 5, 39, 0, 0, 110, 116, 5, 34, 0, 0, 111, 115, 8, 19, 0, 0, 112, 113, 5, 92, 0, 0, 113, 115, 9, 0, 0, 0, 114, 111, 1, 0, 0, 0, 114, 112, 1, 0, 0, 0, 115, 118, 1, 0, 0, 0, 116, 114, 1, 0, 0, 0, 116, 117, 1, 0, 0, 0, 117, 119, 1, 0, 0, 0, 118, 116, 1, 0, 0, 0, 119, 122, 5, 34, 0, 0, 120, 122, 5, 42, 0, 0, 121, 100, 1, 0, 0, 0, 121, 110, 1, 0, 0, 0, 121, 120, 1, 0, 0, 0, 122, 24, 1, 0, 0, 0, 123, 127, 3, 21, 10, 0, 124, 127, 3, 19, 9, 0, 125, 127, 5, 95, 0, 0, 126, 123, 1, 0, 0, 0, 126, 124, 1, 0, 0, 0, 126, 125, 1, 0, 0, 0, 127, 128, 1, 0, 0, 0, 128, 126, 1, 0, 0, 0, 128, 129, 1, 0, 0, 0, 129, 26, 1, 0, 0, 0, 130, 136, 5, 96, 0, 0, 131, 135, 8, 20, 0, 0, 132, 133, 5, 96, 0, 0, 133, 135, 5, 96, 0, 0, 134, 131, 1, 0, 0, 0, 134, 132, 1, 0, 0, 0, 135, 138, 1, 0, 0, 0, 136, 134, 1, 0, 0, 0, 136, 137, 1, 0, 0, 0, 137, 139, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 139, 140, 5, 96, 0, 0, 140, 28, 1, 0, 0, 0, 141, 142, 5, 45, 0, 0, 142, 143, 5, 45, 0, 0, 143, 147, 1, 0, 0, 0, 144, 146, 8, 21, 0, 0, 145, 144, 1, 0, 0, 0, 146, 149, 1, 0, 0, 0, 147, 145, 1, 0, 0, 0, 147, 148, 1, 0, 0, 0, 148, 151, 1, 0, 0, 0, 149, 147, 1, 0, 0, 0, 150, 152, 5, 13, 0, 0, 151, 150, 1, 0, 0, 0, 151, 152, 1, 0, 0, 0, 152, 154, 1, 0, 0, 0, 153, 155, 5, 10, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 6, 14, 0, 0, 157, 30, 1, 0, 0, 0, 158, 159, 5, 47, 0, 0, 159, 160, 5, 42, 0, 0, 160, 161, 5, 42, 0, 0, 161, 162, 5, 47, 0, 0, 162, 163, 1, 0, 0, 0, 163, 164, 6, 15, 0, 0, 164, 32, 1, 0, 0, 0, 165, 166, 5, 47, 0, 0, 166, 167, 5, 42, 0, 0, 167, 168, 1, 0, 0, 0, 168, 172, 8, 22, 0, 0, 169, 171, 9, 0, 0, 0, 170, 169, 1, 0, 0, 0, 171, 174, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 175, 1, 0, 0, 0, 174, 172, 1, 0, 0, 0, 175, 176, 5, 42, 0, 0, 176, 177, 5, 47, 0, 0, 177, 178, 1, 0, 0, 0, 178, 179, 6, 16, 0, 0, 179, 34, 1, 0, 0, 0, 180, 182, 7, 23, 0, 0, 181, 180, 1, 0, 0, 0, 182, 183, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 6, 17, 0, 0, 186, 36, 1, 0, 0, 0, 15, 0, 104, 106, 114, 116, 121, 126, 128, 134, 136, 147, 151, 154, 172, 183, 1, 0, 1, 0]
\ No newline at end of file
diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseLexer.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseLexer.java
deleted file mode 100644
index b7fe9696af..0000000000
--- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseLexer.java
+++ /dev/null
@@ -1,243 +0,0 @@
-// Generated from io/edurt/datacap/sql/parser/SqlBase.g4 by ANTLR 4.12.0
-package io.edurt.datacap.sql.parser;
-import org.antlr.v4.runtime.Lexer;
-import org.antlr.v4.runtime.CharStream;
-import org.antlr.v4.runtime.Token;
-import org.antlr.v4.runtime.TokenStream;
-import org.antlr.v4.runtime.*;
-import org.antlr.v4.runtime.atn.*;
-import org.antlr.v4.runtime.dfa.DFA;
-import org.antlr.v4.runtime.misc.*;
-
-@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue"})
-public class SqlBaseLexer extends Lexer {
- static { RuntimeMetaData.checkVersion("4.12.0", RuntimeMetaData.VERSION); }
-
- protected static final DFA[] _decisionToDFA;
- protected static final PredictionContextCache _sharedContextCache =
- new PredictionContextCache();
- public static final int
- T__0=1, SHOW=2, PATHS=3, TOPICS=4, CONSUMERS=5, DATABASES=6, TABLES=7,
- SELECT=8, FROM=9, STRING=10, IDENTIFIER=11, BACKQUOTED_IDENTIFIER=12,
- SIMPLE_COMMENT=13, BRACKETED_EMPTY_COMMENT=14, BRACKETED_COMMENT=15, WS=16;
- public static String[] channelNames = {
- "DEFAULT_TOKEN_CHANNEL", "HIDDEN"
- };
-
- public static String[] modeNames = {
- "DEFAULT_MODE"
- };
-
- private static String[] makeRuleNames() {
- return new String[] {
- "T__0", "SHOW", "PATHS", "TOPICS", "CONSUMERS", "DATABASES", "TABLES",
- "SELECT", "FROM", "DIGIT", "LETTER", "STRING", "IDENTIFIER", "BACKQUOTED_IDENTIFIER",
- "SIMPLE_COMMENT", "BRACKETED_EMPTY_COMMENT", "BRACKETED_COMMENT", "WS"
- };
- }
- public static final String[] ruleNames = makeRuleNames();
-
- private static String[] makeLiteralNames() {
- return new String[] {
- null, "'.'", null, null, "'TOPICS'", "'CONSUMERS'", null, null, null,
- null, null, null, null, null, "'/**/'"
- };
- }
- private static final String[] _LITERAL_NAMES = makeLiteralNames();
- private static String[] makeSymbolicNames() {
- return new String[] {
- null, null, "SHOW", "PATHS", "TOPICS", "CONSUMERS", "DATABASES", "TABLES",
- "SELECT", "FROM", "STRING", "IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT",
- "BRACKETED_EMPTY_COMMENT", "BRACKETED_COMMENT", "WS"
- };
- }
- private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
- public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
-
- /**
- * @deprecated Use {@link #VOCABULARY} instead.
- */
- @Deprecated
- public static final String[] tokenNames;
- static {
- tokenNames = new String[_SYMBOLIC_NAMES.length];
- for (int i = 0; i < tokenNames.length; i++) {
- tokenNames[i] = VOCABULARY.getLiteralName(i);
- if (tokenNames[i] == null) {
- tokenNames[i] = VOCABULARY.getSymbolicName(i);
- }
-
- if (tokenNames[i] == null) {
- tokenNames[i] = "";
- }
- }
- }
-
- @Override
- @Deprecated
- public String[] getTokenNames() {
- return tokenNames;
- }
-
- @Override
-
- public Vocabulary getVocabulary() {
- return VOCABULARY;
- }
-
-
- public SqlBaseLexer(CharStream input) {
- super(input);
- _interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
- }
-
- @Override
- public String getGrammarFileName() { return "SqlBase.g4"; }
-
- @Override
- public String[] getRuleNames() { return ruleNames; }
-
- @Override
- public String getSerializedATN() { return _serializedATN; }
-
- @Override
- public String[] getChannelNames() { return channelNames; }
-
- @Override
- public String[] getModeNames() { return modeNames; }
-
- @Override
- public ATN getATN() { return _ATN; }
-
- public static final String _serializedATN =
- "\u0004\u0000\u0010\u00bb\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002"+
- "\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002"+
- "\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002"+
- "\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002"+
- "\u000b\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e"+
- "\u0002\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011"+
- "\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+
- "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+
- "\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+
- "\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+
- "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+
- "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+
- "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+
- "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+
- "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+
- "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\n\u0001"+
- "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000bi\b\u000b"+
- "\n\u000b\f\u000bl\t\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+
- "\u0001\u000b\u0005\u000bs\b\u000b\n\u000b\f\u000bv\t\u000b\u0001\u000b"+
- "\u0001\u000b\u0003\u000bz\b\u000b\u0001\f\u0001\f\u0001\f\u0004\f\u007f"+
- "\b\f\u000b\f\f\f\u0080\u0001\r\u0001\r\u0001\r\u0001\r\u0005\r\u0087\b"+
- "\r\n\r\f\r\u008a\t\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e"+
- "\u0001\u000e\u0005\u000e\u0092\b\u000e\n\u000e\f\u000e\u0095\t\u000e\u0001"+
- "\u000e\u0003\u000e\u0098\b\u000e\u0001\u000e\u0003\u000e\u009b\b\u000e"+
- "\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+
- "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010"+
- "\u0001\u0010\u0001\u0010\u0005\u0010\u00ab\b\u0010\n\u0010\f\u0010\u00ae"+
- "\t\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+
- "\u0011\u0004\u0011\u00b6\b\u0011\u000b\u0011\f\u0011\u00b7\u0001\u0011"+
- "\u0001\u0011\u0001\u00ac\u0000\u0012\u0001\u0001\u0003\u0002\u0005\u0003"+
- "\u0007\u0004\t\u0005\u000b\u0006\r\u0007\u000f\b\u0011\t\u0013\u0000\u0015"+
- "\u0000\u0017\n\u0019\u000b\u001b\f\u001d\r\u001f\u000e!\u000f#\u0010\u0001"+
- "\u0000\u0018\u0002\u0000SSss\u0002\u0000HHhh\u0002\u0000OOoo\u0002\u0000"+
- "WWww\u0002\u0000PPpp\u0002\u0000AAaa\u0002\u0000TTtt\u0002\u0000DDdd\u0002"+
- "\u0000BBbb\u0002\u0000EEss\u0002\u0000LLll\u0002\u0000EEee\u0002\u0000"+
- "CCcc\u0002\u0000FFff\u0002\u0000RRrr\u0002\u0000MMmm\u0001\u000009\u0002"+
- "\u0000AZaz\u0002\u0000\'\'\\\\\u0002\u0000\"\"\\\\\u0001\u0000``\u0002"+
- "\u0000\n\n\r\r\u0001\u0000++\u0003\u0000\t\n\r\r \u00c8\u0000\u0001\u0001"+
- "\u0000\u0000\u0000\u0000\u0003\u0001\u0000\u0000\u0000\u0000\u0005\u0001"+
- "\u0000\u0000\u0000\u0000\u0007\u0001\u0000\u0000\u0000\u0000\t\u0001\u0000"+
- "\u0000\u0000\u0000\u000b\u0001\u0000\u0000\u0000\u0000\r\u0001\u0000\u0000"+
- "\u0000\u0000\u000f\u0001\u0000\u0000\u0000\u0000\u0011\u0001\u0000\u0000"+
- "\u0000\u0000\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001\u0000\u0000"+
- "\u0000\u0000\u001b\u0001\u0000\u0000\u0000\u0000\u001d\u0001\u0000\u0000"+
- "\u0000\u0000\u001f\u0001\u0000\u0000\u0000\u0000!\u0001\u0000\u0000\u0000"+
- "\u0000#\u0001\u0000\u0000\u0000\u0001%\u0001\u0000\u0000\u0000\u0003\'"+
- "\u0001\u0000\u0000\u0000\u0005,\u0001\u0000\u0000\u0000\u00072\u0001\u0000"+
- "\u0000\u0000\t9\u0001\u0000\u0000\u0000\u000bC\u0001\u0000\u0000\u0000"+
- "\rM\u0001\u0000\u0000\u0000\u000fT\u0001\u0000\u0000\u0000\u0011[\u0001"+
- "\u0000\u0000\u0000\u0013`\u0001\u0000\u0000\u0000\u0015b\u0001\u0000\u0000"+
- "\u0000\u0017y\u0001\u0000\u0000\u0000\u0019~\u0001\u0000\u0000\u0000\u001b"+
- "\u0082\u0001\u0000\u0000\u0000\u001d\u008d\u0001\u0000\u0000\u0000\u001f"+
- "\u009e\u0001\u0000\u0000\u0000!\u00a5\u0001\u0000\u0000\u0000#\u00b5\u0001"+
- "\u0000\u0000\u0000%&\u0005.\u0000\u0000&\u0002\u0001\u0000\u0000\u0000"+
- "\'(\u0007\u0000\u0000\u0000()\u0007\u0001\u0000\u0000)*\u0007\u0002\u0000"+
- "\u0000*+\u0007\u0003\u0000\u0000+\u0004\u0001\u0000\u0000\u0000,-\u0007"+
- "\u0004\u0000\u0000-.\u0007\u0005\u0000\u0000./\u0007\u0006\u0000\u0000"+
- "/0\u0007\u0001\u0000\u000001\u0007\u0000\u0000\u00001\u0006\u0001\u0000"+
- "\u0000\u000023\u0005T\u0000\u000034\u0005O\u0000\u000045\u0005P\u0000"+
- "\u000056\u0005I\u0000\u000067\u0005C\u0000\u000078\u0005S\u0000\u0000"+
- "8\b\u0001\u0000\u0000\u00009:\u0005C\u0000\u0000:;\u0005O\u0000\u0000"+
- ";<\u0005N\u0000\u0000<=\u0005S\u0000\u0000=>\u0005U\u0000\u0000>?\u0005"+
- "M\u0000\u0000?@\u0005E\u0000\u0000@A\u0005R\u0000\u0000AB\u0005S\u0000"+
- "\u0000B\n\u0001\u0000\u0000\u0000CD\u0007\u0007\u0000\u0000DE\u0007\u0005"+
- "\u0000\u0000EF\u0007\u0006\u0000\u0000FG\u0007\u0005\u0000\u0000GH\u0007"+
- "\b\u0000\u0000HI\u0007\u0005\u0000\u0000IJ\u0007\u0000\u0000\u0000JK\u0007"+
- "\t\u0000\u0000KL\u0007\u0000\u0000\u0000L\f\u0001\u0000\u0000\u0000MN"+
- "\u0007\u0006\u0000\u0000NO\u0007\u0005\u0000\u0000OP\u0007\b\u0000\u0000"+
- "PQ\u0007\n\u0000\u0000QR\u0007\u000b\u0000\u0000RS\u0007\u0000\u0000\u0000"+
- "S\u000e\u0001\u0000\u0000\u0000TU\u0007\u0000\u0000\u0000UV\u0007\u000b"+
- "\u0000\u0000VW\u0007\n\u0000\u0000WX\u0007\u000b\u0000\u0000XY\u0007\f"+
- "\u0000\u0000YZ\u0007\u0006\u0000\u0000Z\u0010\u0001\u0000\u0000\u0000"+
- "[\\\u0007\r\u0000\u0000\\]\u0007\u000e\u0000\u0000]^\u0007\u0002\u0000"+
- "\u0000^_\u0007\u000f\u0000\u0000_\u0012\u0001\u0000\u0000\u0000`a\u0007"+
- "\u0010\u0000\u0000a\u0014\u0001\u0000\u0000\u0000bc\u0007\u0011\u0000"+
- "\u0000c\u0016\u0001\u0000\u0000\u0000dj\u0005\'\u0000\u0000ei\b\u0012"+
- "\u0000\u0000fg\u0005\\\u0000\u0000gi\t\u0000\u0000\u0000he\u0001\u0000"+
- "\u0000\u0000hf\u0001\u0000\u0000\u0000il\u0001\u0000\u0000\u0000jh\u0001"+
- "\u0000\u0000\u0000jk\u0001\u0000\u0000\u0000km\u0001\u0000\u0000\u0000"+
- "lj\u0001\u0000\u0000\u0000mz\u0005\'\u0000\u0000nt\u0005\"\u0000\u0000"+
- "os\b\u0013\u0000\u0000pq\u0005\\\u0000\u0000qs\t\u0000\u0000\u0000ro\u0001"+
- "\u0000\u0000\u0000rp\u0001\u0000\u0000\u0000sv\u0001\u0000\u0000\u0000"+
- "tr\u0001\u0000\u0000\u0000tu\u0001\u0000\u0000\u0000uw\u0001\u0000\u0000"+
- "\u0000vt\u0001\u0000\u0000\u0000wz\u0005\"\u0000\u0000xz\u0005*\u0000"+
- "\u0000yd\u0001\u0000\u0000\u0000yn\u0001\u0000\u0000\u0000yx\u0001\u0000"+
- "\u0000\u0000z\u0018\u0001\u0000\u0000\u0000{\u007f\u0003\u0015\n\u0000"+
- "|\u007f\u0003\u0013\t\u0000}\u007f\u0005_\u0000\u0000~{\u0001\u0000\u0000"+
- "\u0000~|\u0001\u0000\u0000\u0000~}\u0001\u0000\u0000\u0000\u007f\u0080"+
- "\u0001\u0000\u0000\u0000\u0080~\u0001\u0000\u0000\u0000\u0080\u0081\u0001"+
- "\u0000\u0000\u0000\u0081\u001a\u0001\u0000\u0000\u0000\u0082\u0088\u0005"+
- "`\u0000\u0000\u0083\u0087\b\u0014\u0000\u0000\u0084\u0085\u0005`\u0000"+
- "\u0000\u0085\u0087\u0005`\u0000\u0000\u0086\u0083\u0001\u0000\u0000\u0000"+
- "\u0086\u0084\u0001\u0000\u0000\u0000\u0087\u008a\u0001\u0000\u0000\u0000"+
- "\u0088\u0086\u0001\u0000\u0000\u0000\u0088\u0089\u0001\u0000\u0000\u0000"+
- "\u0089\u008b\u0001\u0000\u0000\u0000\u008a\u0088\u0001\u0000\u0000\u0000"+
- "\u008b\u008c\u0005`\u0000\u0000\u008c\u001c\u0001\u0000\u0000\u0000\u008d"+
- "\u008e\u0005-\u0000\u0000\u008e\u008f\u0005-\u0000\u0000\u008f\u0093\u0001"+
- "\u0000\u0000\u0000\u0090\u0092\b\u0015\u0000\u0000\u0091\u0090\u0001\u0000"+
- "\u0000\u0000\u0092\u0095\u0001\u0000\u0000\u0000\u0093\u0091\u0001\u0000"+
- "\u0000\u0000\u0093\u0094\u0001\u0000\u0000\u0000\u0094\u0097\u0001\u0000"+
- "\u0000\u0000\u0095\u0093\u0001\u0000\u0000\u0000\u0096\u0098\u0005\r\u0000"+
- "\u0000\u0097\u0096\u0001\u0000\u0000\u0000\u0097\u0098\u0001\u0000\u0000"+
- "\u0000\u0098\u009a\u0001\u0000\u0000\u0000\u0099\u009b\u0005\n\u0000\u0000"+
- "\u009a\u0099\u0001\u0000\u0000\u0000\u009a\u009b\u0001\u0000\u0000\u0000"+
- "\u009b\u009c\u0001\u0000\u0000\u0000\u009c\u009d\u0006\u000e\u0000\u0000"+
- "\u009d\u001e\u0001\u0000\u0000\u0000\u009e\u009f\u0005/\u0000\u0000\u009f"+
- "\u00a0\u0005*\u0000\u0000\u00a0\u00a1\u0005*\u0000\u0000\u00a1\u00a2\u0005"+
- "/\u0000\u0000\u00a2\u00a3\u0001\u0000\u0000\u0000\u00a3\u00a4\u0006\u000f"+
- "\u0000\u0000\u00a4 \u0001\u0000\u0000\u0000\u00a5\u00a6\u0005/\u0000\u0000"+
- "\u00a6\u00a7\u0005*\u0000\u0000\u00a7\u00a8\u0001\u0000\u0000\u0000\u00a8"+
- "\u00ac\b\u0016\u0000\u0000\u00a9\u00ab\t\u0000\u0000\u0000\u00aa\u00a9"+
- "\u0001\u0000\u0000\u0000\u00ab\u00ae\u0001\u0000\u0000\u0000\u00ac\u00ad"+
- "\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001\u0000\u0000\u0000\u00ad\u00af"+
- "\u0001\u0000\u0000\u0000\u00ae\u00ac\u0001\u0000\u0000\u0000\u00af\u00b0"+
- "\u0005*\u0000\u0000\u00b0\u00b1\u0005/\u0000\u0000\u00b1\u00b2\u0001\u0000"+
- "\u0000\u0000\u00b2\u00b3\u0006\u0010\u0000\u0000\u00b3\"\u0001\u0000\u0000"+
- "\u0000\u00b4\u00b6\u0007\u0017\u0000\u0000\u00b5\u00b4\u0001\u0000\u0000"+
- "\u0000\u00b6\u00b7\u0001\u0000\u0000\u0000\u00b7\u00b5\u0001\u0000\u0000"+
- "\u0000\u00b7\u00b8\u0001\u0000\u0000\u0000\u00b8\u00b9\u0001\u0000\u0000"+
- "\u0000\u00b9\u00ba\u0006\u0011\u0000\u0000\u00ba$\u0001\u0000\u0000\u0000"+
- "\u000f\u0000hjrty~\u0080\u0086\u0088\u0093\u0097\u009a\u00ac\u00b7\u0001"+
- "\u0000\u0001\u0000";
- public static final ATN _ATN =
- new ATNDeserializer().deserialize(_serializedATN.toCharArray());
- static {
- _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
- for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
- _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
- }
- }
-}
\ No newline at end of file
diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseListener.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseListener.java
deleted file mode 100644
index ec47bb84eb..0000000000
--- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseListener.java
+++ /dev/null
@@ -1,130 +0,0 @@
-// Generated from io/edurt/datacap/sql/parser/SqlBase.g4 by ANTLR 4.12.0
-package io.edurt.datacap.sql.parser;
-import org.antlr.v4.runtime.tree.ParseTreeListener;
-
-/**
- * This interface defines a complete listener for a parse tree produced by
- * {@link SqlBaseParser}.
- */
-public interface SqlBaseListener extends ParseTreeListener {
- /**
- * Enter a parse tree produced by {@link SqlBaseParser#singleStatement}.
- * @param ctx the parse tree
- */
- void enterSingleStatement(SqlBaseParser.SingleStatementContext ctx);
- /**
- * Exit a parse tree produced by {@link SqlBaseParser#singleStatement}.
- * @param ctx the parse tree
- */
- void exitSingleStatement(SqlBaseParser.SingleStatementContext ctx);
- /**
- * Enter a parse tree produced by {@link SqlBaseParser#statement}.
- * @param ctx the parse tree
- */
- void enterStatement(SqlBaseParser.StatementContext ctx);
- /**
- * Exit a parse tree produced by {@link SqlBaseParser#statement}.
- * @param ctx the parse tree
- */
- void exitStatement(SqlBaseParser.StatementContext ctx);
- /**
- * Enter a parse tree produced by {@link SqlBaseParser#kafkaQueryTopicStatement}.
- * @param ctx the parse tree
- */
- void enterKafkaQueryTopicStatement(SqlBaseParser.KafkaQueryTopicStatementContext ctx);
- /**
- * Exit a parse tree produced by {@link SqlBaseParser#kafkaQueryTopicStatement}.
- * @param ctx the parse tree
- */
- void exitKafkaQueryTopicStatement(SqlBaseParser.KafkaQueryTopicStatementContext ctx);
- /**
- * Enter a parse tree produced by {@link SqlBaseParser#kafkaQueryConsumerStatement}.
- * @param ctx the parse tree
- */
- void enterKafkaQueryConsumerStatement(SqlBaseParser.KafkaQueryConsumerStatementContext ctx);
- /**
- * Exit a parse tree produced by {@link SqlBaseParser#kafkaQueryConsumerStatement}.
- * @param ctx the parse tree
- */
- void exitKafkaQueryConsumerStatement(SqlBaseParser.KafkaQueryConsumerStatementContext ctx);
- /**
- * Enter a parse tree produced by {@link SqlBaseParser#kafkaQueryStatement}.
- * @param ctx the parse tree
- */
- void enterKafkaQueryStatement(SqlBaseParser.KafkaQueryStatementContext ctx);
- /**
- * Exit a parse tree produced by {@link SqlBaseParser#kafkaQueryStatement}.
- * @param ctx the parse tree
- */
- void exitKafkaQueryStatement(SqlBaseParser.KafkaQueryStatementContext ctx);
- /**
- * Enter a parse tree produced by {@link SqlBaseParser#kafkaStatement}.
- * @param ctx the parse tree
- */
- void enterKafkaStatement(SqlBaseParser.KafkaStatementContext ctx);
- /**
- * Exit a parse tree produced by {@link SqlBaseParser#kafkaStatement}.
- * @param ctx the parse tree
- */
- void exitKafkaStatement(SqlBaseParser.KafkaStatementContext ctx);
- /**
- * Enter a parse tree produced by {@link SqlBaseParser#childPathStatement}.
- * @param ctx the parse tree
- */
- void enterChildPathStatement(SqlBaseParser.ChildPathStatementContext ctx);
- /**
- * Exit a parse tree produced by {@link SqlBaseParser#childPathStatement}.
- * @param ctx the parse tree
- */
- void exitChildPathStatement(SqlBaseParser.ChildPathStatementContext ctx);
- /**
- * Enter a parse tree produced by {@link SqlBaseParser#columnStatement}.
- * @param ctx the parse tree
- */
- void enterColumnStatement(SqlBaseParser.ColumnStatementContext ctx);
- /**
- * Exit a parse tree produced by {@link SqlBaseParser#columnStatement}.
- * @param ctx the parse tree
- */
- void exitColumnStatement(SqlBaseParser.ColumnStatementContext ctx);
- /**
- * Enter a parse tree produced by {@link SqlBaseParser#fromClause}.
- * @param ctx the parse tree
- */
- void enterFromClause(SqlBaseParser.FromClauseContext ctx);
- /**
- * Exit a parse tree produced by {@link SqlBaseParser#fromClause}.
- * @param ctx the parse tree
- */
- void exitFromClause(SqlBaseParser.FromClauseContext ctx);
- /**
- * Enter a parse tree produced by {@link SqlBaseParser#tableName}.
- * @param ctx the parse tree
- */
- void enterTableName(SqlBaseParser.TableNameContext ctx);
- /**
- * Exit a parse tree produced by {@link SqlBaseParser#tableName}.
- * @param ctx the parse tree
- */
- void exitTableName(SqlBaseParser.TableNameContext ctx);
- /**
- * Enter a parse tree produced by {@link SqlBaseParser#identifier}.
- * @param ctx the parse tree
- */
- void enterIdentifier(SqlBaseParser.IdentifierContext ctx);
- /**
- * Exit a parse tree produced by {@link SqlBaseParser#identifier}.
- * @param ctx the parse tree
- */
- void exitIdentifier(SqlBaseParser.IdentifierContext ctx);
- /**
- * Enter a parse tree produced by {@link SqlBaseParser#quotedIdentifier}.
- * @param ctx the parse tree
- */
- void enterQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx);
- /**
- * Exit a parse tree produced by {@link SqlBaseParser#quotedIdentifier}.
- * @param ctx the parse tree
- */
- void exitQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx);
-}
\ No newline at end of file
diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseParser.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseParser.java
deleted file mode 100644
index ed18dea1e4..0000000000
--- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseParser.java
+++ /dev/null
@@ -1,911 +0,0 @@
-// Generated from io/edurt/datacap/sql/parser/SqlBase.g4 by ANTLR 4.12.0
-package io.edurt.datacap.sql.parser;
-import org.antlr.v4.runtime.atn.*;
-import org.antlr.v4.runtime.dfa.DFA;
-import org.antlr.v4.runtime.*;
-import org.antlr.v4.runtime.misc.*;
-import org.antlr.v4.runtime.tree.*;
-import java.util.List;
-import java.util.Iterator;
-import java.util.ArrayList;
-
-@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue"})
-public class SqlBaseParser extends Parser {
- static { RuntimeMetaData.checkVersion("4.12.0", RuntimeMetaData.VERSION); }
-
- protected static final DFA[] _decisionToDFA;
- protected static final PredictionContextCache _sharedContextCache =
- new PredictionContextCache();
- public static final int
- T__0=1, SHOW=2, PATHS=3, TOPICS=4, CONSUMERS=5, DATABASES=6, TABLES=7,
- SELECT=8, FROM=9, STRING=10, IDENTIFIER=11, BACKQUOTED_IDENTIFIER=12,
- SIMPLE_COMMENT=13, BRACKETED_EMPTY_COMMENT=14, BRACKETED_COMMENT=15, WS=16;
- public static final int
- RULE_singleStatement = 0, RULE_statement = 1, RULE_kafkaQueryTopicStatement = 2,
- RULE_kafkaQueryConsumerStatement = 3, RULE_kafkaQueryStatement = 4, RULE_kafkaStatement = 5,
- RULE_childPathStatement = 6, RULE_columnStatement = 7, RULE_fromClause = 8,
- RULE_tableName = 9, RULE_identifier = 10, RULE_quotedIdentifier = 11;
- private static String[] makeRuleNames() {
- return new String[] {
- "singleStatement", "statement", "kafkaQueryTopicStatement", "kafkaQueryConsumerStatement",
- "kafkaQueryStatement", "kafkaStatement", "childPathStatement", "columnStatement",
- "fromClause", "tableName", "identifier", "quotedIdentifier"
- };
- }
- public static final String[] ruleNames = makeRuleNames();
-
- private static String[] makeLiteralNames() {
- return new String[] {
- null, "'.'", null, null, "'TOPICS'", "'CONSUMERS'", null, null, null,
- null, null, null, null, null, "'/**/'"
- };
- }
- private static final String[] _LITERAL_NAMES = makeLiteralNames();
- private static String[] makeSymbolicNames() {
- return new String[] {
- null, null, "SHOW", "PATHS", "TOPICS", "CONSUMERS", "DATABASES", "TABLES",
- "SELECT", "FROM", "STRING", "IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT",
- "BRACKETED_EMPTY_COMMENT", "BRACKETED_COMMENT", "WS"
- };
- }
- private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
- public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
-
- /**
- * @deprecated Use {@link #VOCABULARY} instead.
- */
- @Deprecated
- public static final String[] tokenNames;
- static {
- tokenNames = new String[_SYMBOLIC_NAMES.length];
- for (int i = 0; i < tokenNames.length; i++) {
- tokenNames[i] = VOCABULARY.getLiteralName(i);
- if (tokenNames[i] == null) {
- tokenNames[i] = VOCABULARY.getSymbolicName(i);
- }
-
- if (tokenNames[i] == null) {
- tokenNames[i] = "";
- }
- }
- }
-
- @Override
- @Deprecated
- public String[] getTokenNames() {
- return tokenNames;
- }
-
- @Override
-
- public Vocabulary getVocabulary() {
- return VOCABULARY;
- }
-
- @Override
- public String getGrammarFileName() { return "SqlBase.g4"; }
-
- @Override
- public String[] getRuleNames() { return ruleNames; }
-
- @Override
- public String getSerializedATN() { return _serializedATN; }
-
- @Override
- public ATN getATN() { return _ATN; }
-
- public SqlBaseParser(TokenStream input) {
- super(input);
- _interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class SingleStatementContext extends ParserRuleContext {
- public List<StatementContext> statement() {
- return getRuleContexts(StatementContext.class);
- }
- public StatementContext statement(int i) {
- return getRuleContext(StatementContext.class,i);
- }
- public SingleStatementContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_singleStatement; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterSingleStatement(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitSingleStatement(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor<? extends T>)visitor).visitSingleStatement(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final SingleStatementContext singleStatement() throws RecognitionException {
- SingleStatementContext _localctx = new SingleStatementContext(_ctx, getState());
- enterRule(_localctx, 0, RULE_singleStatement);
- int _la;
- try {
- enterOuterAlt(_localctx, 1);
- {
- setState(27);
- _errHandler.sync(this);
- _la = _input.LA(1);
- while (_la==SHOW || _la==SELECT) {
- {
- {
- setState(24);
- statement();
- }
- }
- setState(29);
- _errHandler.sync(this);
- _la = _input.LA(1);
- }
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class StatementContext extends ParserRuleContext {
- public TerminalNode SHOW() { return getToken(SqlBaseParser.SHOW, 0); }
- public ChildPathStatementContext childPathStatement() {
- return getRuleContext(ChildPathStatementContext.class,0);
- }
- public TerminalNode SELECT() { return getToken(SqlBaseParser.SELECT, 0); }
- public ColumnStatementContext columnStatement() {
- return getRuleContext(ColumnStatementContext.class,0);
- }
- public FromClauseContext fromClause() {
- return getRuleContext(FromClauseContext.class,0);
- }
- public KafkaStatementContext kafkaStatement() {
- return getRuleContext(KafkaStatementContext.class,0);
- }
- public StatementContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_statement; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterStatement(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitStatement(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor<? extends T>)visitor).visitStatement(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final StatementContext statement() throws RecognitionException {
- StatementContext _localctx = new StatementContext(_ctx, getState());
- enterRule(_localctx, 2, RULE_statement);
- try {
- setState(37);
- _errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) {
- case 1:
- enterOuterAlt(_localctx, 1);
- {
- setState(30);
- match(SHOW);
- setState(31);
- childPathStatement();
- }
- break;
- case 2:
- enterOuterAlt(_localctx, 2);
- {
- setState(32);
- match(SELECT);
- setState(33);
- columnStatement();
- setState(34);
- fromClause();
- }
- break;
- case 3:
- enterOuterAlt(_localctx, 3);
- {
- setState(36);
- kafkaStatement();
- }
- break;
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class KafkaQueryTopicStatementContext extends ParserRuleContext {
- public TerminalNode SHOW() { return getToken(SqlBaseParser.SHOW, 0); }
- public TerminalNode TOPICS() { return getToken(SqlBaseParser.TOPICS, 0); }
- public TerminalNode DATABASES() { return getToken(SqlBaseParser.DATABASES, 0); }
- public KafkaQueryTopicStatementContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_kafkaQueryTopicStatement; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterKafkaQueryTopicStatement(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitKafkaQueryTopicStatement(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor<? extends T>)visitor).visitKafkaQueryTopicStatement(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final KafkaQueryTopicStatementContext kafkaQueryTopicStatement() throws RecognitionException {
- KafkaQueryTopicStatementContext _localctx = new KafkaQueryTopicStatementContext(_ctx, getState());
- enterRule(_localctx, 4, RULE_kafkaQueryTopicStatement);
- try {
- setState(43);
- _errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) {
- case 1:
- enterOuterAlt(_localctx, 1);
- {
- setState(39);
- match(SHOW);
- setState(40);
- match(TOPICS);
- }
- break;
- case 2:
- enterOuterAlt(_localctx, 2);
- {
- setState(41);
- match(SHOW);
- setState(42);
- match(DATABASES);
- }
- break;
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class KafkaQueryConsumerStatementContext extends ParserRuleContext {
- public TerminalNode SHOW() { return getToken(SqlBaseParser.SHOW, 0); }
- public TerminalNode CONSUMERS() { return getToken(SqlBaseParser.CONSUMERS, 0); }
- public FromClauseContext fromClause() {
- return getRuleContext(FromClauseContext.class,0);
- }
- public TerminalNode TABLES() { return getToken(SqlBaseParser.TABLES, 0); }
- public KafkaQueryConsumerStatementContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_kafkaQueryConsumerStatement; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterKafkaQueryConsumerStatement(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitKafkaQueryConsumerStatement(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor<? extends T>)visitor).visitKafkaQueryConsumerStatement(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final KafkaQueryConsumerStatementContext kafkaQueryConsumerStatement() throws RecognitionException {
- KafkaQueryConsumerStatementContext _localctx = new KafkaQueryConsumerStatementContext(_ctx, getState());
- enterRule(_localctx, 6, RULE_kafkaQueryConsumerStatement);
- try {
- setState(55);
- _errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,3,_ctx) ) {
- case 1:
- enterOuterAlt(_localctx, 1);
- {
- setState(45);
- match(SHOW);
- setState(46);
- match(CONSUMERS);
- }
- break;
- case 2:
- enterOuterAlt(_localctx, 2);
- {
- setState(47);
- match(SHOW);
- setState(48);
- match(CONSUMERS);
- setState(49);
- fromClause();
- }
- break;
- case 3:
- enterOuterAlt(_localctx, 3);
- {
- setState(50);
- match(SHOW);
- setState(51);
- match(TABLES);
- }
- break;
- case 4:
- enterOuterAlt(_localctx, 4);
- {
- setState(52);
- match(SHOW);
- setState(53);
- match(TABLES);
- setState(54);
- fromClause();
- }
- break;
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class KafkaQueryStatementContext extends ParserRuleContext {
- public KafkaQueryTopicStatementContext kafkaQueryTopicStatement() {
- return getRuleContext(KafkaQueryTopicStatementContext.class,0);
- }
- public KafkaQueryConsumerStatementContext kafkaQueryConsumerStatement() {
- return getRuleContext(KafkaQueryConsumerStatementContext.class,0);
- }
- public KafkaQueryStatementContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_kafkaQueryStatement; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterKafkaQueryStatement(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitKafkaQueryStatement(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor<? extends T>)visitor).visitKafkaQueryStatement(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final KafkaQueryStatementContext kafkaQueryStatement() throws RecognitionException {
- KafkaQueryStatementContext _localctx = new KafkaQueryStatementContext(_ctx, getState());
- enterRule(_localctx, 8, RULE_kafkaQueryStatement);
- try {
- setState(59);
- _errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) {
- case 1:
- enterOuterAlt(_localctx, 1);
- {
- setState(57);
- kafkaQueryTopicStatement();
- }
- break;
- case 2:
- enterOuterAlt(_localctx, 2);
- {
- setState(58);
- kafkaQueryConsumerStatement();
- }
- break;
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class KafkaStatementContext extends ParserRuleContext {
- public KafkaQueryStatementContext kafkaQueryStatement() {
- return getRuleContext(KafkaQueryStatementContext.class,0);
- }
- public KafkaStatementContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_kafkaStatement; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterKafkaStatement(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitKafkaStatement(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor<? extends T>)visitor).visitKafkaStatement(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final KafkaStatementContext kafkaStatement() throws RecognitionException {
- KafkaStatementContext _localctx = new KafkaStatementContext(_ctx, getState());
- enterRule(_localctx, 10, RULE_kafkaStatement);
- try {
- enterOuterAlt(_localctx, 1);
- {
- setState(61);
- kafkaQueryStatement();
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class ChildPathStatementContext extends ParserRuleContext {
- public TerminalNode PATHS() { return getToken(SqlBaseParser.PATHS, 0); }
- public FromClauseContext fromClause() {
- return getRuleContext(FromClauseContext.class,0);
- }
- public ChildPathStatementContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_childPathStatement; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterChildPathStatement(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitChildPathStatement(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor<? extends T>)visitor).visitChildPathStatement(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final ChildPathStatementContext childPathStatement() throws RecognitionException {
- ChildPathStatementContext _localctx = new ChildPathStatementContext(_ctx, getState());
- enterRule(_localctx, 12, RULE_childPathStatement);
- try {
- setState(66);
- _errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) {
- case 1:
- enterOuterAlt(_localctx, 1);
- {
- setState(63);
- match(PATHS);
- }
- break;
- case 2:
- enterOuterAlt(_localctx, 2);
- {
- setState(64);
- match(PATHS);
- setState(65);
- fromClause();
- }
- break;
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class ColumnStatementContext extends ParserRuleContext {
- public IdentifierContext identifier() {
- return getRuleContext(IdentifierContext.class,0);
- }
- public ColumnStatementContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_columnStatement; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterColumnStatement(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitColumnStatement(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor<? extends T>)visitor).visitColumnStatement(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final ColumnStatementContext columnStatement() throws RecognitionException {
- ColumnStatementContext _localctx = new ColumnStatementContext(_ctx, getState());
- enterRule(_localctx, 14, RULE_columnStatement);
- try {
- enterOuterAlt(_localctx, 1);
- {
- setState(68);
- identifier();
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class FromClauseContext extends ParserRuleContext {
- public TerminalNode FROM() { return getToken(SqlBaseParser.FROM, 0); }
- public TableNameContext tableName() {
- return getRuleContext(TableNameContext.class,0);
- }
- public FromClauseContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_fromClause; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterFromClause(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitFromClause(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor<? extends T>)visitor).visitFromClause(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final FromClauseContext fromClause() throws RecognitionException {
- FromClauseContext _localctx = new FromClauseContext(_ctx, getState());
- enterRule(_localctx, 16, RULE_fromClause);
- try {
- enterOuterAlt(_localctx, 1);
- {
- setState(70);
- match(FROM);
- setState(71);
- tableName();
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class TableNameContext extends ParserRuleContext {
- public List<IdentifierContext> identifier() {
- return getRuleContexts(IdentifierContext.class);
- }
- public IdentifierContext identifier(int i) {
- return getRuleContext(IdentifierContext.class,i);
- }
- public TableNameContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_tableName; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterTableName(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitTableName(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor<? extends T>)visitor).visitTableName(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final TableNameContext tableName() throws RecognitionException {
- TableNameContext _localctx = new TableNameContext(_ctx, getState());
- enterRule(_localctx, 18, RULE_tableName);
- int _la;
- try {
- enterOuterAlt(_localctx, 1);
- {
- setState(73);
- identifier();
- setState(78);
- _errHandler.sync(this);
- _la = _input.LA(1);
- while (_la==T__0) {
- {
- {
- setState(74);
- match(T__0);
- setState(75);
- identifier();
- }
- }
- setState(80);
- _errHandler.sync(this);
- _la = _input.LA(1);
- }
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class IdentifierContext extends ParserRuleContext {
- public List<TerminalNode> IDENTIFIER() { return getTokens(SqlBaseParser.IDENTIFIER); }
- public TerminalNode IDENTIFIER(int i) {
- return getToken(SqlBaseParser.IDENTIFIER, i);
- }
- public List<TerminalNode> STRING() { return getTokens(SqlBaseParser.STRING); }
- public TerminalNode STRING(int i) {
- return getToken(SqlBaseParser.STRING, i);
- }
- public List<QuotedIdentifierContext> quotedIdentifier() {
- return getRuleContexts(QuotedIdentifierContext.class);
- }
- public QuotedIdentifierContext quotedIdentifier(int i) {
- return getRuleContext(QuotedIdentifierContext.class,i);
- }
- public IdentifierContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_identifier; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterIdentifier(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitIdentifier(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor<? extends T>)visitor).visitIdentifier(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final IdentifierContext identifier() throws RecognitionException {
- IdentifierContext _localctx = new IdentifierContext(_ctx, getState());
- enterRule(_localctx, 20, RULE_identifier);
- int _la;
- try {
- enterOuterAlt(_localctx, 1);
- {
- setState(86);
- _errHandler.sync(this);
- _la = _input.LA(1);
- while ((((_la) & ~0x3f) == 0 && ((1L << _la) & 7168L) != 0)) {
- {
- setState(84);
- _errHandler.sync(this);
- switch (_input.LA(1)) {
- case IDENTIFIER:
- {
- setState(81);
- match(IDENTIFIER);
- }
- break;
- case STRING:
- {
- setState(82);
- match(STRING);
- }
- break;
- case BACKQUOTED_IDENTIFIER:
- {
- setState(83);
- quotedIdentifier();
- }
- break;
- default:
- throw new NoViableAltException(this);
- }
- }
- setState(88);
- _errHandler.sync(this);
- _la = _input.LA(1);
- }
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class QuotedIdentifierContext extends ParserRuleContext {
- public TerminalNode BACKQUOTED_IDENTIFIER() { return getToken(SqlBaseParser.BACKQUOTED_IDENTIFIER, 0); }
- public QuotedIdentifierContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_quotedIdentifier; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterQuotedIdentifier(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitQuotedIdentifier(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor<? extends T>)visitor).visitQuotedIdentifier(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final QuotedIdentifierContext quotedIdentifier() throws RecognitionException {
- QuotedIdentifierContext _localctx = new QuotedIdentifierContext(_ctx, getState());
- enterRule(_localctx, 22, RULE_quotedIdentifier);
- try {
- enterOuterAlt(_localctx, 1);
- {
- setState(89);
- match(BACKQUOTED_IDENTIFIER);
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- public static final String _serializedATN =
- "\u0004\u0001\u0010\\\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+
- "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+
- "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+
- "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0001"+
- "\u0000\u0005\u0000\u001a\b\u0000\n\u0000\f\u0000\u001d\t\u0000\u0001\u0001"+
- "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+
- "\u0003\u0001&\b\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+
- "\u0003\u0002,\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+
- "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+
- "\u0003\u00038\b\u0003\u0001\u0004\u0001\u0004\u0003\u0004<\b\u0004\u0001"+
- "\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006C\b"+
- "\u0006\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t"+
- "\u0001\t\u0005\tM\b\t\n\t\f\tP\t\t\u0001\n\u0001\n\u0001\n\u0005\nU\b"+
- "\n\n\n\f\nX\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0000\u0000\f\u0000"+
- "\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0000\u0000\\\u0000"+
- "\u001b\u0001\u0000\u0000\u0000\u0002%\u0001\u0000\u0000\u0000\u0004+\u0001"+
- "\u0000\u0000\u0000\u00067\u0001\u0000\u0000\u0000\b;\u0001\u0000\u0000"+
- "\u0000\n=\u0001\u0000\u0000\u0000\fB\u0001\u0000\u0000\u0000\u000eD\u0001"+
- "\u0000\u0000\u0000\u0010F\u0001\u0000\u0000\u0000\u0012I\u0001\u0000\u0000"+
- "\u0000\u0014V\u0001\u0000\u0000\u0000\u0016Y\u0001\u0000\u0000\u0000\u0018"+
- "\u001a\u0003\u0002\u0001\u0000\u0019\u0018\u0001\u0000\u0000\u0000\u001a"+
- "\u001d\u0001\u0000\u0000\u0000\u001b\u0019\u0001\u0000\u0000\u0000\u001b"+
- "\u001c\u0001\u0000\u0000\u0000\u001c\u0001\u0001\u0000\u0000\u0000\u001d"+
- "\u001b\u0001\u0000\u0000\u0000\u001e\u001f\u0005\u0002\u0000\u0000\u001f"+
- "&\u0003\f\u0006\u0000 !\u0005\b\u0000\u0000!\"\u0003\u000e\u0007\u0000"+
- "\"#\u0003\u0010\b\u0000#&\u0001\u0000\u0000\u0000$&\u0003\n\u0005\u0000"+
- "%\u001e\u0001\u0000\u0000\u0000% \u0001\u0000\u0000\u0000%$\u0001\u0000"+
- "\u0000\u0000&\u0003\u0001\u0000\u0000\u0000\'(\u0005\u0002\u0000\u0000"+
- "(,\u0005\u0004\u0000\u0000)*\u0005\u0002\u0000\u0000*,\u0005\u0006\u0000"+
- "\u0000+\'\u0001\u0000\u0000\u0000+)\u0001\u0000\u0000\u0000,\u0005\u0001"+
- "\u0000\u0000\u0000-.\u0005\u0002\u0000\u0000.8\u0005\u0005\u0000\u0000"+
- "/0\u0005\u0002\u0000\u000001\u0005\u0005\u0000\u000018\u0003\u0010\b\u0000"+
- "23\u0005\u0002\u0000\u000038\u0005\u0007\u0000\u000045\u0005\u0002\u0000"+
- "\u000056\u0005\u0007\u0000\u000068\u0003\u0010\b\u00007-\u0001\u0000\u0000"+
- "\u00007/\u0001\u0000\u0000\u000072\u0001\u0000\u0000\u000074\u0001\u0000"+
- "\u0000\u00008\u0007\u0001\u0000\u0000\u00009<\u0003\u0004\u0002\u0000"+
- ":<\u0003\u0006\u0003\u0000;9\u0001\u0000\u0000\u0000;:\u0001\u0000\u0000"+
- "\u0000<\t\u0001\u0000\u0000\u0000=>\u0003\b\u0004\u0000>\u000b\u0001\u0000"+
- "\u0000\u0000?C\u0005\u0003\u0000\u0000@A\u0005\u0003\u0000\u0000AC\u0003"+
- "\u0010\b\u0000B?\u0001\u0000\u0000\u0000B@\u0001\u0000\u0000\u0000C\r"+
- "\u0001\u0000\u0000\u0000DE\u0003\u0014\n\u0000E\u000f\u0001\u0000\u0000"+
- "\u0000FG\u0005\t\u0000\u0000GH\u0003\u0012\t\u0000H\u0011\u0001\u0000"+
- "\u0000\u0000IN\u0003\u0014\n\u0000JK\u0005\u0001\u0000\u0000KM\u0003\u0014"+
- "\n\u0000LJ\u0001\u0000\u0000\u0000MP\u0001\u0000\u0000\u0000NL\u0001\u0000"+
- "\u0000\u0000NO\u0001\u0000\u0000\u0000O\u0013\u0001\u0000\u0000\u0000"+
- "PN\u0001\u0000\u0000\u0000QU\u0005\u000b\u0000\u0000RU\u0005\n\u0000\u0000"+
- "SU\u0003\u0016\u000b\u0000TQ\u0001\u0000\u0000\u0000TR\u0001\u0000\u0000"+
- "\u0000TS\u0001\u0000\u0000\u0000UX\u0001\u0000\u0000\u0000VT\u0001\u0000"+
- "\u0000\u0000VW\u0001\u0000\u0000\u0000W\u0015\u0001\u0000\u0000\u0000"+
- "XV\u0001\u0000\u0000\u0000YZ\u0005\f\u0000\u0000Z\u0017\u0001\u0000\u0000"+
- "\u0000\t\u001b%+7;BNTV";
- public static final ATN _ATN =
- new ATNDeserializer().deserialize(_serializedATN.toCharArray());
- static {
- _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
- for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
- _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
- }
- }
-}
\ No newline at end of file
diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseVisitor.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseVisitor.java
deleted file mode 100644
index af88b64f76..0000000000
--- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseVisitor.java
+++ /dev/null
@@ -1,85 +0,0 @@
-// Generated from io/edurt/datacap/sql/parser/SqlBase.g4 by ANTLR 4.12.0
-package io.edurt.datacap.sql.parser;
-import org.antlr.v4.runtime.tree.ParseTreeVisitor;
-
-/**
- * This interface defines a complete generic visitor for a parse tree produced
- * by {@link SqlBaseParser}.
- *
- * @param <T> The return type of the visit operation. Use {@link Void} for
- * operations with no return type.
- */
-public interface SqlBaseVisitor<T> extends ParseTreeVisitor<T> {
- /**
- * Visit a parse tree produced by {@link SqlBaseParser#singleStatement}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitSingleStatement(SqlBaseParser.SingleStatementContext ctx);
- /**
- * Visit a parse tree produced by {@link SqlBaseParser#statement}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitStatement(SqlBaseParser.StatementContext ctx);
- /**
- * Visit a parse tree produced by {@link SqlBaseParser#kafkaQueryTopicStatement}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitKafkaQueryTopicStatement(SqlBaseParser.KafkaQueryTopicStatementContext ctx);
- /**
- * Visit a parse tree produced by {@link SqlBaseParser#kafkaQueryConsumerStatement}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitKafkaQueryConsumerStatement(SqlBaseParser.KafkaQueryConsumerStatementContext ctx);
- /**
- * Visit a parse tree produced by {@link SqlBaseParser#kafkaQueryStatement}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitKafkaQueryStatement(SqlBaseParser.KafkaQueryStatementContext ctx);
- /**
- * Visit a parse tree produced by {@link SqlBaseParser#kafkaStatement}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitKafkaStatement(SqlBaseParser.KafkaStatementContext ctx);
- /**
- * Visit a parse tree produced by {@link SqlBaseParser#childPathStatement}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitChildPathStatement(SqlBaseParser.ChildPathStatementContext ctx);
- /**
- * Visit a parse tree produced by {@link SqlBaseParser#columnStatement}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitColumnStatement(SqlBaseParser.ColumnStatementContext ctx);
- /**
- * Visit a parse tree produced by {@link SqlBaseParser#fromClause}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitFromClause(SqlBaseParser.FromClauseContext ctx);
- /**
- * Visit a parse tree produced by {@link SqlBaseParser#tableName}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitTableName(SqlBaseParser.TableNameContext ctx);
- /**
- * Visit a parse tree produced by {@link SqlBaseParser#identifier}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitIdentifier(SqlBaseParser.IdentifierContext ctx);
- /**
- * Visit a parse tree produced by {@link SqlBaseParser#quotedIdentifier}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx);
-}
\ No newline at end of file
diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/processor/ExpressionProcessor.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/processor/ExpressionProcessor.java
new file mode 100644
index 0000000000..2bc491d6e7
--- /dev/null
+++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/processor/ExpressionProcessor.java
@@ -0,0 +1,117 @@
+package io.edurt.datacap.sql.processor;
+
+import io.edurt.datacap.sql.node.Expression;
+import io.edurt.datacap.sql.parser.SqlBaseBaseVisitor;
+import io.edurt.datacap.sql.parser.SqlBaseParser;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class ExpressionProcessor
+ extends SqlBaseBaseVisitor<Expression>
+{
+ @Override
+ public Expression visitAndExpression(SqlBaseParser.AndExpressionContext ctx)
+ {
+ Expression expr = new Expression();
+ expr.setType(Expression.ExpressionType.BINARY_OP);
+ expr.setValue("AND");
+
+ List<Expression> children = new ArrayList<>();
+ children.add(visit(ctx.expression(0)));
+ children.add(visit(ctx.expression(1)));
+ expr.setChildren(children);
+
+ return expr;
+ }
+
+ @Override
+ public Expression visitOrExpression(SqlBaseParser.OrExpressionContext ctx)
+ {
+ Expression expr = new Expression();
+ expr.setType(Expression.ExpressionType.BINARY_OP);
+ expr.setValue("OR");
+
+ List<Expression> children = new ArrayList<>();
+ children.add(visit(ctx.expression(0)));
+ children.add(visit(ctx.expression(1)));
+ expr.setChildren(children);
+
+ return expr;
+ }
+
+ @Override
+ public Expression visitComparisonExpression(SqlBaseParser.ComparisonExpressionContext ctx)
+ {
+ Expression expr = new Expression();
+ expr.setType(Expression.ExpressionType.BINARY_OP);
+ expr.setValue(ctx.comparisonOperator().getText());
+
+ List<Expression> children = new ArrayList<>();
+ children.add(visit(ctx.expression(0)));
+ children.add(visit(ctx.expression(1)));
+ expr.setChildren(children);
+
+ return expr;
+ }
+
+ @Override
+ public Expression visitColumnReferencePrimary(SqlBaseParser.ColumnReferencePrimaryContext ctx)
+ {
+ Expression expr = new Expression();
+ expr.setType(Expression.ExpressionType.COLUMN_REFERENCE);
+ expr.setValue(ctx.columnReference().getText());
+ return expr;
+ }
+
+ @Override
+ public Expression visitLiteralPrimary(SqlBaseParser.LiteralPrimaryContext ctx)
+ {
+ Expression expr = new Expression();
+ expr.setType(Expression.ExpressionType.LITERAL);
+ expr.setValue(ctx.literal().getText());
+ return expr;
+ }
+
+ @Override
+ public Expression visitParenExpression(SqlBaseParser.ParenExpressionContext ctx)
+ {
+ return visit(ctx.expression());
+ }
+
+ @Override
+ public Expression visitFunctionCallPrimary(SqlBaseParser.FunctionCallPrimaryContext ctx)
+ {
+ Expression expr = new Expression();
+
+ // 检查是否是 VERSION 函数
+ // Check if it is a VERSION function
+ if (ctx.functionCall().VERSION() != null) {
+ expr.setType(Expression.ExpressionType.FUNCTION);
+ expr.setValue("VERSION");
+ return expr;
+ }
+
+ expr.setType(Expression.ExpressionType.FUNCTION);
+ expr.setValue(ctx.functionCall().functionName().getText());
+
+ // 直接获取函数参数的文本表示,而不是创建子表达式
+ // Directly get the text representation of function parameters, instead of creating child expressions
+ if (ctx.functionCall().expression() != null && !ctx.functionCall().expression().isEmpty()) {
+ SqlBaseParser.ExpressionContext firstArg = ctx.functionCall().expression(0);
+ String columnRef = firstArg.getText();
+
+ // 创建一个单独的 COLUMN_REFERENCE 表达式
+ // Create a separate COLUMN_REFERENCE expression
+ Expression columnExpr = new Expression();
+ columnExpr.setType(Expression.ExpressionType.COLUMN_REFERENCE);
+ columnExpr.setValue(columnRef);
+
+ List<Expression> args = new ArrayList<>();
+ args.add(columnExpr);
+ expr.setChildren(args);
+ }
+
+ return expr;
+ }
+}
diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/processor/ShowProcessor.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/processor/ShowProcessor.java
new file mode 100644
index 0000000000..3b336c67c4
--- /dev/null
+++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/processor/ShowProcessor.java
@@ -0,0 +1,87 @@
+package io.edurt.datacap.sql.processor;
+
+import io.edurt.datacap.sql.parser.SqlBaseParser;
+import io.edurt.datacap.sql.statement.ShowStatement;
+
+public class ShowProcessor
+{
+ private final ExpressionProcessor expressionProcessor;
+
+ public ShowProcessor()
+ {
+ this.expressionProcessor = new ExpressionProcessor();
+ }
+
+ public ShowStatement process(SqlBaseParser.ShowStatementContext ctx)
+ {
+ ShowStatement statement = new ShowStatement();
+
+ if (ctx.showDatabasesStatement() != null) {
+ processShowDatabases(statement, ctx.showDatabasesStatement());
+ }
+ else if (ctx.showTablesStatement() != null) {
+ processShowTables(statement, ctx.showTablesStatement());
+ }
+ else if (ctx.showColumnsStatement() != null) {
+ processShowColumns(statement, ctx.showColumnsStatement());
+ }
+
+ return statement;
+ }
+
+ private void processShowDatabases(ShowStatement statement,
+ SqlBaseParser.ShowDatabasesStatementContext ctx)
+ {
+ statement.setShowType(ShowStatement.ShowType.DATABASES);
+ if (ctx.STRING() != null) {
+ // Remove quotes from the pattern string
+ String pattern = ctx.STRING().getText();
+ pattern = pattern.substring(1, pattern.length() - 1);
+ statement.setPattern(pattern);
+ }
+ }
+
+ private void processShowTables(ShowStatement statement,
+ SqlBaseParser.ShowTablesStatementContext ctx)
+ {
+ statement.setShowType(ShowStatement.ShowType.TABLES);
+
+ if (ctx.databaseName() != null) {
+ statement.setDatabaseName(ctx.databaseName().getText());
+ }
+
+ if (ctx.STRING() != null) {
+ // Remove quotes from the pattern string
+ String pattern = ctx.STRING().getText();
+ pattern = pattern.substring(1, pattern.length() - 1);
+ statement.setPattern(pattern);
+ }
+ else if (ctx.expression() != null) {
+ statement.setWhereCondition(expressionProcessor.visit(ctx.expression()));
+ }
+ }
+
+ private void processShowColumns(ShowStatement statement,
+ SqlBaseParser.ShowColumnsStatementContext ctx)
+ {
+ statement.setShowType(ShowStatement.ShowType.COLUMNS);
+
+ if (ctx.tableName() != null) {
+ statement.setTableName(ctx.tableName().getText());
+ }
+
+ if (ctx.databaseName() != null) {
+ statement.setDatabaseName(ctx.databaseName().getText());
+ }
+
+ if (ctx.STRING() != null) {
+ // Remove quotes from the pattern string
+ String pattern = ctx.STRING().getText();
+ pattern = pattern.substring(1, pattern.length() - 1);
+ statement.setPattern(pattern);
+ }
+ else if (ctx.expression() != null) {
+ statement.setWhereCondition(expressionProcessor.visit(ctx.expression()));
+ }
+ }
+}
diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/SQLStatement.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/SQLStatement.java
new file mode 100644
index 0000000000..24bff1bc31
--- /dev/null
+++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/SQLStatement.java
@@ -0,0 +1,21 @@
+package io.edurt.datacap.sql.statement;
+
+public abstract class SQLStatement
+{
+ private final StatementType type;
+
+ public SQLStatement(StatementType type)
+ {
+ this.type = type;
+ }
+
+ public StatementType getType()
+ {
+ return type;
+ }
+
+ public enum StatementType
+ {
+ SELECT, INSERT, UPDATE, DELETE, CREATE, ALTER, DROP, USE, SHOW
+ }
+}
diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/SelectStatement.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/SelectStatement.java
new file mode 100644
index 0000000000..8b665db765
--- /dev/null
+++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/SelectStatement.java
@@ -0,0 +1,32 @@
+package io.edurt.datacap.sql.statement;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import io.edurt.datacap.sql.node.Expression;
+import io.edurt.datacap.sql.node.clause.LimitClause;
+import io.edurt.datacap.sql.node.element.OrderByElement;
+import io.edurt.datacap.sql.node.element.SelectElement;
+import io.edurt.datacap.sql.node.element.TableElement;
+import lombok.Getter;
+import lombok.Setter;
+
+import java.util.List;
+
+@Getter
+@Setter
+@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"})
+public class SelectStatement
+ extends SQLStatement
+{
+ private List<SelectElement> selectElements;
+ private List<TableElement> fromSources;
+ private Expression whereClause;
+ private List<Expression> groupByElements;
+ private Expression havingClause;
+ private List<OrderByElement> orderByElements;
+ private LimitClause limitClause;
+
+ public SelectStatement()
+ {
+ super(StatementType.SELECT);
+ }
+}
diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/ShowStatement.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/ShowStatement.java
new file mode 100644
index 0000000000..080bf2d862
--- /dev/null
+++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/ShowStatement.java
@@ -0,0 +1,31 @@
+package io.edurt.datacap.sql.statement;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import io.edurt.datacap.sql.node.Expression;
+import lombok.Getter;
+import lombok.Setter;
+
+@Getter
+@Setter
+@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"})
+public class ShowStatement
+ extends SQLStatement
+{
+ private ShowType showType;
+ private String databaseName;
+ private String tableName;
+ private String pattern;
+ private Expression whereCondition;
+
+ public ShowStatement()
+ {
+ super(StatementType.SHOW);
+ }
+
+ public enum ShowType
+ {
+ DATABASES,
+ TABLES,
+ COLUMNS
+ }
+}
diff --git a/core/datacap-parser/src/test/java/io/edurt/datacap/sql/KafkaParserTest.java b/core/datacap-parser/src/test/java/io/edurt/datacap/sql/KafkaParserTest.java
deleted file mode 100644
index 7f2cfadff5..0000000000
--- a/core/datacap-parser/src/test/java/io/edurt/datacap/sql/KafkaParserTest.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package io.edurt.datacap.sql;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-public class KafkaParserTest
-{
- private String table = "aa";
-
- @Test
- public void showTopic()
- {
- SqlBaseFormatter formatter = new SqlBaseFormatter("show topics");
- Assert.assertTrue(formatter.getParseResult().isSuccessful());
- }
-
- @Test
- public void showConsumers()
- {
- SqlBaseFormatter formatter = new SqlBaseFormatter("show Consumers");
- Assert.assertTrue(formatter.getParseResult().isSuccessful());
-
- formatter = new SqlBaseFormatter("show Consumers from " + table);
- Assert.assertEquals(formatter.getParseResult().getTable(), table);
- }
-
- @Test
- public void showDatabases()
- {
- SqlBaseFormatter formatter = new SqlBaseFormatter("show databases");
- Assert.assertTrue(formatter.getParseResult().getToken().equals("SHOW"));
- }
-
- @Test
- public void showTables()
- {
- SqlBaseFormatter formatter = new SqlBaseFormatter("show tables");
- Assert.assertTrue(formatter.getParseResult().getChildToken().equals("TABLES"));
-
- formatter = new SqlBaseFormatter("show tables from " + table);
- Assert.assertEquals(formatter.getParseResult().getTable(), table);
- }
-}
diff --git a/core/datacap-parser/src/test/java/io/edurt/datacap/sql/ShowPathsTest.java b/core/datacap-parser/src/test/java/io/edurt/datacap/sql/ShowPathsTest.java
deleted file mode 100644
index b27396678e..0000000000
--- a/core/datacap-parser/src/test/java/io/edurt/datacap/sql/ShowPathsTest.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package io.edurt.datacap.sql;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-public class ShowPathsTest
-{
- @Test
- public void testShowPaths()
- {
- SqlBaseFormatter formatter = new SqlBaseFormatter("show paths");
- Assert.assertTrue(formatter.getParseResult().isSuccessful());
-
- formatter = new SqlBaseFormatter("SHOW PATHS");
- Assert.assertTrue(formatter.getParseResult().isSuccessful());
- }
- @Test
- public void testShowPathsFrom()
- {
- SqlBaseFormatter formatter = new SqlBaseFormatter("show paths from aa");
- Assert.assertTrue(formatter.getParseResult().isSuccessful());
-
- formatter = new SqlBaseFormatter("SHOW PATHS FROM aaa");
- Assert.assertTrue(formatter.getParseResult().isSuccessful());
- }
-}
\ No newline at end of file
diff --git a/core/datacap-parser/src/test/java/io/edurt/datacap/sql/SqlBaseFormatterTest.java b/core/datacap-parser/src/test/java/io/edurt/datacap/sql/SqlBaseFormatterTest.java
deleted file mode 100644
index 0fb988fb41..0000000000
--- a/core/datacap-parser/src/test/java/io/edurt/datacap/sql/SqlBaseFormatterTest.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package io.edurt.datacap.sql;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-public class SqlBaseFormatterTest
-{
- @Test
- public void getParseResult()
- {
- String sql = "SELECT * FROM a";
- SqlBaseFormatter formatter = new SqlBaseFormatter(sql);
- Assert.assertTrue(formatter.getParseResult().isSuccessful());
- }
-}
\ No newline at end of file
diff --git a/core/datacap-plugin/pom.xml b/core/datacap-plugin/pom.xml
index 5e84068948..4e8fbde2d7 100644
--- a/core/datacap-plugin/pom.xml
+++ b/core/datacap-plugin/pom.xml
@@ -6,7 +6,7 @@
 <groupId>io.edurt.datacap</groupId>
 <artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
 <relativePath>../../pom.xml</relativePath>
diff --git a/core/datacap-security/pom.xml b/core/datacap-security/pom.xml
index 14debfbf35..01ae7308f7 100644
--- a/core/datacap-security/pom.xml
+++ b/core/datacap-security/pom.xml
@@ -6,7 +6,7 @@
 <groupId>io.edurt.datacap</groupId>
 <artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
 <relativePath>../../pom.xml</relativePath>
diff --git a/core/datacap-server/pom.xml b/core/datacap-server/pom.xml
index 00e4010761..db69e0f4b5 100644
--- a/core/datacap-server/pom.xml
+++ b/core/datacap-server/pom.xml
@@ -5,7 +5,7 @@
 <artifactId>datacap</artifactId>
 <groupId>io.edurt.datacap</groupId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
 <relativePath>../../pom.xml</relativePath>
 <modelVersion>4.0.0</modelVersion>
diff --git a/core/datacap-service/pom.xml b/core/datacap-service/pom.xml
index 8c8263c60d..6872fa387b 100644
--- a/core/datacap-service/pom.xml
+++ b/core/datacap-service/pom.xml
@@ -6,7 +6,7 @@
 <groupId>io.edurt.datacap</groupId>
 <artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
 <relativePath>../../pom.xml</relativePath>
diff --git a/core/datacap-service/src/main/java/io/edurt/datacap/service/security/UserDetailsService.java b/core/datacap-service/src/main/java/io/edurt/datacap/service/security/UserDetailsService.java
index bcbefa67bf..111b910166 100644
--- a/core/datacap-service/src/main/java/io/edurt/datacap/service/security/UserDetailsService.java
+++ b/core/datacap-service/src/main/java/io/edurt/datacap/service/security/UserDetailsService.java
@@ -48,6 +48,7 @@ public UserDetailsService(Long id, String code, String username, String password
public static UserDetailsService build(UserEntity user)
{
+ // TODO: Add an alert when the user has no code
 List<SimpleGrantedAuthority> authorities = user.getRoles().stream()
.map(role -> new SimpleGrantedAuthority(role.getCode()))
.collect(Collectors.toList());
diff --git a/core/datacap-spi/pom.xml b/core/datacap-spi/pom.xml
index 5d6193e7b7..8de62ecd8f 100644
--- a/core/datacap-spi/pom.xml
+++ b/core/datacap-spi/pom.xml
@@ -5,7 +5,7 @@
 <artifactId>datacap</artifactId>
 <groupId>io.edurt.datacap</groupId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
 <relativePath>../../pom.xml</relativePath>
 <modelVersion>4.0.0</modelVersion>
diff --git a/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/Parser.java b/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/Parser.java
index ca813f0bcb..195b274763 100644
--- a/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/Parser.java
+++ b/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/Parser.java
@@ -1,10 +1,10 @@
package io.edurt.datacap.spi.parser;
-import io.edurt.datacap.sql.SqlBase;
+import io.edurt.datacap.sql.statement.SQLStatement;
public interface Parser
{
- SqlBase getSqlBase();
+ SQLStatement getStatement();
String getExecuteContext();
}
diff --git a/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/SqlParser.java b/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/SqlParser.java
index a93a34d626..a9f9e39764 100644
--- a/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/SqlParser.java
+++ b/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/SqlParser.java
@@ -1,24 +1,22 @@
package io.edurt.datacap.spi.parser;
-import io.edurt.datacap.sql.SqlBase;
-import io.edurt.datacap.sql.SqlBaseFormatter;
+import io.edurt.datacap.sql.SQLParser;
+import io.edurt.datacap.sql.statement.SQLStatement;
public class SqlParser
implements Parser
{
private final String content;
- private SqlBaseFormatter formatter;
public SqlParser(String content)
{
this.content = content;
- this.formatter = new SqlBaseFormatter(this.content);
}
@Override
- public SqlBase getSqlBase()
+ public SQLStatement getStatement()
{
- return this.formatter.getParseResult();
+ return SQLParser.parse(content.trim());
}
@Override
diff --git a/core/datacap-sql/pom.xml b/core/datacap-sql/pom.xml
index d73b031bc0..44d449b21f 100644
--- a/core/datacap-sql/pom.xml
+++ b/core/datacap-sql/pom.xml
@@ -4,7 +4,7 @@
 <groupId>io.edurt.datacap</groupId>
 <artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
 <relativePath>../../pom.xml</relativePath>
diff --git a/core/datacap-ui/package.json b/core/datacap-ui/package.json
index 2d77efc161..db14c0d011 100644
--- a/core/datacap-ui/package.json
+++ b/core/datacap-ui/package.json
@@ -1,7 +1,7 @@
{
"name": "datacap-ui",
"description": "DataCap console",
- "version": "2024.4.0",
+ "version": "2024.4.1-SNAPSHOT",
"private": true,
"scripts": {
"dev": "vite",
diff --git a/docs/docs/driver/mongodb.md b/docs/docs/driver/mongodb.md
new file mode 100644
index 0000000000..08123bb51d
--- /dev/null
+++ b/docs/docs/driver/mongodb.md
@@ -0,0 +1,102 @@
+---
+title: MongoDB Driver
+---
+
+The DataCap MongoDB Driver is used to connect to and operate MongoDB databases from within DataCap. The driver supports the following syntax:
+
+- `SHOW ...` statements
+- `SELECT ...` statements
+
+The DataCap MongoDB Driver works with all DataCap versions.
+
+## Usage
+
+---
+
+```xml
+<dependency>
+    <groupId>io.edurt.datacap</groupId>
+    <artifactId>datacap-driver-mongodb</artifactId>
+    <version>${VERSION}</version>
+    <scope>test</scope>
+</dependency>
+```
+
+The `VERSION` value can be found in the Maven Central repository.
+
+Driver class name: `io.edurt.datacap.driver.MongoJdbcDriver`
+
+Supported JDBC URL prefixes:
+
+- `jdbc:mongodb:`
+- `jdbc:mongo:`
+- `jdbc:mongodb+srv:`
+
+### Examples
+
+- With authentication
+
+```java
+Class.forName("io.edurt.datacap.driver.MongoJdbcDriver");
+Properties props = new Properties();
+props.setProperty("database", "xxxx");
+props.setProperty("user", "xxxx");
+props.setProperty("password", "xxxx");
+
+String jdbcUrl = String.format("jdbc:mongodb://%s:%d", "127.0.0.1", 27017);
+connection = DriverManager.getConnection(jdbcUrl, props);
+```
+
+- Without authentication
+
+```java
+Class.forName("io.edurt.datacap.driver.MongoJdbcDriver");
+String jdbcUrl = String.format("jdbc:mongodb://%s:%d", "127.0.0.1", 27017);
+connection = DriverManager.getConnection(jdbcUrl);
+```
+
+## SHOW syntax
+
+The DataCap MongoDB Driver supports the following SHOW statements (see the example after this list):
+
+- `SHOW DATABASES`
+- `SHOW DATABASES LIKE ...`
+- `SHOW TABLES`
+- `SHOW TABLES FROM ...`
+- `SHOW TABLES LIKE ...`
+- `SHOW COLUMNS`
+- `SHOW COLUMNS FROM ...`
+- `SHOW COLUMNS FROM ... FROM ...`
+- `SHOW COLUMNS FROM ... LIKE ...`
+
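+As a minimal sketch (assuming the driver is on the classpath and a local, unauthenticated MongoDB instance; the host, port, database and collection names below are placeholders), SHOW statements are issued through the standard JDBC API:
+
+```java
+Class.forName("io.edurt.datacap.driver.MongoJdbcDriver");
+
+Properties props = new Properties();
+props.setProperty("database", "test");
+
+try (Connection connection = DriverManager.getConnection("jdbc:mongodb://127.0.0.1:27017", props);
+        Statement statement = connection.createStatement();
+        ResultSet rs = statement.executeQuery("SHOW TABLES FROM test")) {
+    while (rs.next()) {
+        // Column names depend on how the driver maps the command result, so read by index
+        System.out.println(rs.getObject(1));
+    }
+}
+```
+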
+## SELECT syntax
+
+The DataCap MongoDB Driver supports the following SELECT statements (a short sketch follows the list):
+
+- `SELECT * FROM ...`
+- `SELECT ... FROM ...`
+- `SELECT column_name AS alias_name FROM ...`
+- `SELECT column_name AS alias_name, ... FROM ...`
+- `SELECT column_name AS alias_name, ... FROM ... WHERE ...`
+- `SELECT column_name AS alias_name, ... FROM ... WHERE ... ORDER BY ...`
+- `SELECT column_name AS alias_name, ... FROM ... WHERE ... ORDER BY ... LIMIT ...`
+- `SELECT column_name AS alias_name, ... FROM ... WHERE ... GROUP BY ...`
+- `SELECT column_name AS alias_name, ... FROM ... WHERE ... GROUP BY ... LIMIT ... OFFSET ...`
+- `SELECT column_name AS alias_name, SUM(columnName) ... FROM ... WHERE ... GROUP BY ...`
+
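+A minimal sketch of a filtered, sorted and limited query, reusing a `connection` opened as in the SHOW example above. The `users` collection and the `name`/`age` fields are made-up names used only for illustration, and whether the alias is exposed as the result column name depends on the driver's mapping:
+
+```java
+String sql = "SELECT name AS userName, age FROM users WHERE age > 18 ORDER BY age LIMIT 10";
+try (Statement statement = connection.createStatement();
+        ResultSet rs = statement.executeQuery(sql)) {
+    while (rs.next()) {
+        System.out.println(rs.getObject("userName") + " " + rs.getObject("age"));
+    }
+}
+```
+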
+### Aggregate functions
+
+The DataCap MongoDB Driver supports the following aggregate functions, which appear in the combined example at the end of this page:
+
+- `COUNT(*)`
+- `COUNT(columnName)`
+- `SUM(columnName)`
+- `AVG(columnName)`
+- `MIN(columnName)`
+- `MAX(columnName)`
+
+## System functions
+
+The DataCap MongoDB Driver supports the following system functions (see the example below):
+
+- `SELECT VERSION()`
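+
+A minimal sketch combining a system function call with a grouped aggregate, again reusing a `connection` opened as in the SHOW example above; the `users` collection and `city` field are illustrative names only:
+
+```java
+// System function: report the server version
+try (Statement statement = connection.createStatement();
+        ResultSet rs = statement.executeQuery("SELECT VERSION()")) {
+    if (rs.next()) {
+        System.out.println("version: " + rs.getObject(1));
+    }
+}
+
+// Aggregate function: count documents per city
+try (Statement statement = connection.createStatement();
+        ResultSet rs = statement.executeQuery("SELECT city, COUNT(*) FROM users GROUP BY city")) {
+    while (rs.next()) {
+        System.out.println(rs.getObject(1) + ": " + rs.getObject(2));
+    }
+}
+```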
diff --git a/docs/docs/release/latest.en.md b/docs/docs/release/latest.en.md
index 348218fb03..814972ee19 100644
--- a/docs/docs/release/latest.en.md
+++ b/docs/docs/release/latest.en.md
@@ -1,12 +1,19 @@
**DataCap Released!**
-| Release Version | Published |
-|:--------:|:------------:|
-| `2024.4` | `2024-12-02` |
+| Release Version | Published |
+|:---------------:|:------------:|
+| `2024.4.0` | `2024-12-02` |
!!! note
-This is a brand new version that uses a new plugin management system, new APIs, and other new features. This update is a new version and is not compatible with the previous version. Make a backup of your data before upgrading to avoid data loss. The database is compatible, as long as the upgraded SQL is executed.
+    This is a brand new version that introduces a new plugin management system, new APIs, and other new features. It is not compatible with the previous version, so back up your data before upgrading to avoid data loss. The database remains compatible as long as the upgrade SQL is executed.
+    Also note that after upgrading you must update the `code` field of the `datacap_user` and `datacap_role` tables so that every row has a unique value; otherwise you will not be able to log in. (If this is a clean installation, you can skip this step.)
+ Execute the following SQL statement to upgrade the database:
+ ```sql
+ INSERT INTO `datacap_menu` VALUES
+ (18,'全局 - 商店','STORE','','/store','',3,'VIEW',0,1,'common.store','Store',NULL,'2024-11-05 21:18:28',0,0,NULL);
+ INSERT INTO `datacap_role_menu_relation` VALUES ('1','18')
+ ```
#### Key features
@@ -24,5 +31,4 @@ This is a brand new version that uses a new plugin management system, new APIs,
---
-- Added Open API documentation
--
\ No newline at end of file
+- Added Open API documentation
\ No newline at end of file
diff --git a/docs/docs/release/latest.md b/docs/docs/release/latest.md
index 7d5b96afa9..47d109e44d 100644
--- a/docs/docs/release/latest.md
+++ b/docs/docs/release/latest.md
@@ -1,12 +1,19 @@
**DataCap 发布!**
-| 发布版本 | 发布时间 |
-|:--------:|:------------:|
-| `2024.4` | `2024-12-02` |
+| 发布版本 | 发布时间 |
+|:----------:|:------------:|
+| `2024.4.0` | `2024-12-02` |
!!! note
- 本版本是一个全新的版本,完全使用了新的插件管理系统,新的 API 等各种新特性。本次更新为全新的版本,不兼容之前的版本。升级前要做好数据备份,以免数据丢失。数据库是兼容的,只要执行升级的 SQL 就可以了。
+ 本版本是一个全新的版本,完全使用了新的插件管理系统,新的 API 等各种新特性。本次更新为全新的版本,不兼容之前的版本。升级前要做好数据备份,以免数据丢失。数据库是兼容的,只要执行升级的 SQL 就可以了。
+ 还需要注意的是升级版本后,要修改 `datacap_user` 和 `datacap_role` 表的 `code` 字段的值每条数据唯一即可,否则会导致无法登录。(如果是全新安装可忽略这个步骤)
+ 执行以下 SQL 语句升级数据库:
+ ```sql
+ INSERT INTO `datacap_menu` VALUES
+ (18,'全局 - 商店','STORE','','/store','',3,'VIEW',0,1,'common.store','Store',NULL,'2024-11-05 21:18:28',0,0,NULL);
+ INSERT INTO `datacap_role_menu_relation` VALUES ('1','18')
+ ```
#### 主要功能
diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml
index 1336cb210b..9bcf8f59cd 100644
--- a/docs/mkdocs.yml
+++ b/docs/mkdocs.yml
@@ -134,6 +134,7 @@ plugins:
ApiPlugin: 插件 API
ApiDashboard: 仪表盘 API
ApiMenu: 菜单 API
+ NavDriver: 驱动
- locale: en
name: English
build: true
@@ -158,6 +159,7 @@ plugins:
ApiPlugin: Plugin API
ApiDashboard: Dashboard API
ApiMenu: Menu API
+ NavDriver: Driver
- search
- git-revision-date-localized:
enable_creation_date: true
@@ -309,5 +311,7 @@ nav:
- api/menu/list.md
- api/menu/save.md
- api/menu/edit.md
+ - NavDriver:
+ - driver/mongodb.md
- useCases.md
- partners.md
diff --git a/driver/datacap-driver-mongo/pom.xml b/driver/datacap-driver-mongo/pom.xml
index 317be37d4c..f0debc95d7 100644
--- a/driver/datacap-driver-mongo/pom.xml
+++ b/driver/datacap-driver-mongo/pom.xml
@@ -6,7 +6,7 @@
 <groupId>io.edurt.datacap</groupId>
 <artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
 <relativePath>../../pom.xml</relativePath>
diff --git a/driver/datacap-driver-mongodb/pom.xml b/driver/datacap-driver-mongodb/pom.xml
new file mode 100644
index 0000000000..f7adf59e28
--- /dev/null
+++ b/driver/datacap-driver-mongodb/pom.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>io.edurt.datacap</groupId>
+        <artifactId>datacap</artifactId>
+        <version>2024.4.1-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>datacap-driver-mongodb</artifactId>
+    <name>DataCap - MongoDB - Driver</name>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.mongodb</groupId>
+            <artifactId>mongodb-driver-sync</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.edurt.datacap</groupId>
+            <artifactId>datacap-parser</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+    </dependencies>
+</project>
diff --git a/driver/datacap-driver-mongodb/src/main/java/com/dbschema/MongoJdbcDriver.java b/driver/datacap-driver-mongodb/src/main/java/com/dbschema/MongoJdbcDriver.java
new file mode 100644
index 0000000000..aae7fc3a93
--- /dev/null
+++ b/driver/datacap-driver-mongodb/src/main/java/com/dbschema/MongoJdbcDriver.java
@@ -0,0 +1,20 @@
+package com.dbschema;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+
+import java.sql.DriverManager;
+import java.sql.SQLException;
+
+@SuppressFBWarnings(value = {"NM_SAME_SIMPLE_NAME_AS_SUPERCLASS"})
+public class MongoJdbcDriver
+ extends io.edurt.datacap.driver.MongoJdbcDriver
+{
+ static {
+ try {
+ DriverManager.registerDriver(new MongoJdbcDriver());
+ }
+ catch (SQLException e) {
+ throw new RuntimeException("Can't register com.dbschema.MongoJdbcDriver", e);
+ }
+ }
+}
diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoConnection.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoConnection.java
new file mode 100644
index 0000000000..042bad6f40
--- /dev/null
+++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoConnection.java
@@ -0,0 +1,478 @@
+package io.edurt.datacap.driver;
+
+import com.mongodb.MongoClientSettings;
+import com.mongodb.MongoCredential;
+import com.mongodb.ServerAddress;
+import com.mongodb.client.MongoClient;
+import com.mongodb.client.MongoClients;
+import com.mongodb.client.MongoDatabase;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import lombok.Getter;
+
+import java.sql.Array;
+import java.sql.Blob;
+import java.sql.CallableStatement;
+import java.sql.Clob;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.NClob;
+import java.sql.PreparedStatement;
+import java.sql.SQLClientInfoException;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.sql.SQLWarning;
+import java.sql.SQLXML;
+import java.sql.Savepoint;
+import java.sql.Statement;
+import java.sql.Struct;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.Executor;
+
+@SuppressFBWarnings(value = {"CT_CONSTRUCTOR_THROW", "NP_NONNULL_RETURN_VIOLATION"})
+public class MongoConnection
+ implements Connection
+{
+ @Getter
+ private final MongoDatabase database;
+ private final MongoClient mongoClient;
+ private boolean isClosed = false;
+
+ // Constructor to establish MongoDB connection
+ // 构造函数用于建立MongoDB连接
+ public MongoConnection(String url, Properties info)
+ throws SQLException
+ {
+ try {
+ String databaseName = info.getProperty("database", "admin");
+
+ // 如果URL中包含认证信息,直接使用URL创建客户端
+ // If the URL contains authentication information, create a client directly using the URL
+ if (url.contains("@")) {
+ this.mongoClient = MongoClients.create(url);
+ }
+ else {
+ // 否则检查Properties中的认证信息
+ // Otherwise, check the authentication information in Properties
+ String username = info.getProperty("user");
+ String password = info.getProperty("password");
+
+ if (username != null && password != null) {
+ // 创建认证凭证
+ // Create authentication credentials
+ MongoCredential credential = MongoCredential.createCredential(
+ username,
+ databaseName,
+ password.toCharArray()
+ );
+
+ // 解析主机和端口
+ // Parse host and port
+ String[] hostPort = url.split("://")[1].split(":");
+ String host = hostPort[0];
+ int port = hostPort.length > 1 ? Integer.parseInt(hostPort[1]) : 27017;
+
+ // 创建带认证的客户端设置
+ // Create client settings with authentication
+ MongoClientSettings settings = MongoClientSettings.builder()
+ .credential(credential)
+ .applyToClusterSettings(builder ->
+ builder.hosts(Collections.singletonList(new ServerAddress(host, port))))
+ .build();
+
+ this.mongoClient = MongoClients.create(settings);
+ }
+ else {
+ // 无认证信息,直接连接
+ // No authentication information, connect directly
+ // Remove jdbc:
+ this.mongoClient = MongoClients.create(url.substring(5));
+ }
+ }
+
+ this.database = mongoClient.getDatabase(databaseName);
+
+ // 验证连接
+ // Verify connection
+ database.runCommand(new org.bson.Document("ping", 1));
+ }
+ catch (Exception e) {
+ throw new SQLException("Failed to connect to MongoDB: " + e.getMessage(), e);
+ }
+ }
+
+ // Create statement for executing queries
+ // 创建用于执行查询的Statement
+ @Override
+ public Statement createStatement()
+ throws SQLException
+ {
+ checkClosed();
+ return new MongoStatement(this);
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public CallableStatement prepareCall(String sql)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public String nativeSQL(String sql)
+ throws SQLException
+ {
+ return "";
+ }
+
+ // Check if connection is closed
+ // 检查连接是否已关闭
+ private void checkClosed()
+ throws SQLException
+ {
+ if (isClosed) {
+ throw new SQLException("Connection is closed");
+ }
+ }
+
+ // Close the connection
+ // 关闭连接
+ @Override
+ public void close()
+ {
+ if (!isClosed) {
+ mongoClient.close();
+ isClosed = true;
+ }
+ }
+
+ // Check if connection is closed
+ // 检查连接是否已关闭
+ @Override
+ public boolean isClosed()
+ throws SQLException
+ {
+ return isClosed;
+ }
+
+ @Override
+ public DatabaseMetaData getMetaData()
+ throws SQLException
+ {
+ throw new SQLFeatureNotSupportedException("Method not supported");
+ }
+
+ @Override
+ public void setReadOnly(boolean readOnly)
+ throws SQLException
+ {}
+
+ @Override
+ public boolean isReadOnly()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public void setCatalog(String catalog)
+ throws SQLException
+ {}
+
+ @Override
+ public String getCatalog()
+ throws SQLException
+ {
+ return "";
+ }
+
+ @Override
+ public void setTransactionIsolation(int level)
+ throws SQLException
+ {}
+
+ @Override
+ public int getTransactionIsolation()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public SQLWarning getWarnings()
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public void clearWarnings()
+ throws SQLException
+ {}
+
+ @Override
+ public Statement createStatement(int resultSetType, int resultSetConcurrency)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Map<String, Class<?>> getTypeMap()
+ throws SQLException
+ {
+ return Map.of();
+ }
+
+ @Override
+ public void setTypeMap(Map<String, Class<?>> map)
+ throws SQLException
+ {}
+
+ @Override
+ public void setHoldability(int holdability)
+ throws SQLException
+ {}
+
+ @Override
+ public int getHoldability()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public Savepoint setSavepoint()
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Savepoint setSavepoint(String name)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public void rollback(Savepoint savepoint)
+ throws SQLException
+ {}
+
+ @Override
+ public void releaseSavepoint(Savepoint savepoint)
+ throws SQLException
+ {}
+
+ @Override
+ public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql, int[] columnIndexes)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql, String[] columnNames)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Clob createClob()
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Blob createBlob()
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public NClob createNClob()
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public SQLXML createSQLXML()
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public boolean isValid(int timeout)
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public void setClientInfo(String name, String value)
+ throws SQLClientInfoException
+ {}
+
+ @Override
+ public void setClientInfo(Properties properties)
+ throws SQLClientInfoException
+ {}
+
+ @Override
+ public String getClientInfo(String name)
+ throws SQLException
+ {
+ return "";
+ }
+
+ @Override
+ public Properties getClientInfo()
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Array createArrayOf(String typeName, Object[] elements)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Struct createStruct(String typeName, Object[] attributes)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public void setSchema(String schema)
+ throws SQLException
+ {}
+
+ @Override
+ public String getSchema()
+ throws SQLException
+ {
+ return "";
+ }
+
+ @Override
+ public void abort(Executor executor)
+ throws SQLException
+ {}
+
+ @Override
+ public void setNetworkTimeout(Executor executor, int milliseconds)
+ throws SQLException
+ {}
+
+ @Override
+ public int getNetworkTimeout()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public void setAutoCommit(boolean autoCommit)
+ throws SQLException
+ {
+ // MongoDB doesn't support transactions in the same way as relational databases
+ // MongoDB 不支持与关系数据库相同的事务
+ throw new UnsupportedOperationException("MongoDB doesn't support transactions in the same way as relational databases");
+ }
+
+ @Override
+ public boolean getAutoCommit()
+ throws SQLException
+ {
+ return true;
+ }
+
+ @Override
+ public void commit()
+ throws SQLException
+ {}
+
+ @Override
+ public void rollback()
+ throws SQLException
+ {}
+
+ @Override
+ public <T> T unwrap(Class<T> iface)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public boolean isWrapperFor(Class<?> iface)
+ throws SQLException
+ {
+ return false;
+ }
+
+ public MongoClient getClient()
+ {
+ return mongoClient;
+ }
+}
diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoJdbcDriver.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoJdbcDriver.java
new file mode 100644
index 0000000000..b1d6261287
--- /dev/null
+++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoJdbcDriver.java
@@ -0,0 +1,93 @@
+package io.edurt.datacap.driver;
+
+import java.sql.Connection;
+import java.sql.Driver;
+import java.sql.DriverManager;
+import java.sql.DriverPropertyInfo;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.util.Properties;
+import java.util.logging.Logger;
+
+public class MongoJdbcDriver
+ implements Driver
+{
+ // Static initialization of driver
+ // 静态初始化驱动
+ static {
+ try {
+ DriverManager.registerDriver(new MongoJdbcDriver());
+ }
+ catch (SQLException e) {
+ throw new RuntimeException("Can't register MongoDB JDBC Driver", e);
+ }
+ }
+
+ // Check if this driver can handle the given URL
+ // 检查驱动是否可以处理给定的URL
+ @Override
+ public boolean acceptsURL(String url)
+ throws SQLException
+ {
+ return url != null && (
+ url.startsWith("jdbc:mongo:")
+ || url.startsWith("jdbc:mongodb:")
+ || url.startsWith("jdbc:mongodb+srv:")
+ );
+ }
+
+ // Connect to MongoDB database
+ // 连接MongoDB数据库
+ @Override
+ public Connection connect(String url, Properties info)
+ throws SQLException
+ {
+ if (!acceptsURL(url)) {
+ return null;
+ }
+
+ return new MongoConnection(url, info);
+ }
+
+ // Get driver's major version
+ // 获取驱动主版本号
+ @Override
+ public int getMajorVersion()
+ {
+ return 1;
+ }
+
+ // Get driver's minor version
+ // 获取驱动次版本号
+ @Override
+ public int getMinorVersion()
+ {
+ return 0;
+ }
+
+ // Get driver's property info
+ // 获取驱动属性信息
+ @Override
+ public DriverPropertyInfo[] getPropertyInfo(String url, Properties info)
+ throws SQLException
+ {
+ return new DriverPropertyInfo[0];
+ }
+
+ // Check if driver is JDBC compliant
+ // 检查驱动是否符合JDBC规范
+ @Override
+ public boolean jdbcCompliant()
+ {
+ return false;
+ }
+
+ // Get parent logger
+ // 获取父日志记录器
+ @Override
+ public Logger getParentLogger()
+ throws SQLFeatureNotSupportedException
+ {
+ throw new SQLFeatureNotSupportedException("Parent logger is not supported");
+ }
+}
diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoResultSet.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoResultSet.java
new file mode 100644
index 0000000000..0bfca00853
--- /dev/null
+++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoResultSet.java
@@ -0,0 +1,1348 @@
+package io.edurt.datacap.driver;
+
+import com.mongodb.client.AggregateIterable;
+import com.mongodb.client.MongoCursor;
+import org.bson.Document;
+
+import java.io.InputStream;
+import java.io.Reader;
+import java.math.BigDecimal;
+import java.net.URL;
+import java.nio.charset.Charset;
+import java.sql.Array;
+import java.sql.Blob;
+import java.sql.Clob;
+import java.sql.Date;
+import java.sql.NClob;
+import java.sql.Ref;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.RowId;
+import java.sql.SQLException;
+import java.sql.SQLWarning;
+import java.sql.SQLXML;
+import java.sql.Statement;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.List;
+import java.util.Map;
+
+public class MongoResultSet
+ implements ResultSet
+{
+ private final MongoCursor<Document> cursor;
+ private final List<String> columnNames;
+ private Document current;
+ private boolean isClosed = false;
+ private ResultSetMetaData metadata;
+
+ // Constructor
+ // 构造函数
+ public MongoResultSet(AggregateIterable<Document> result)
+ {
+ this.cursor = result.iterator();
+ this.columnNames = new ArrayList<>();
+ this.current = null;
+ this.metadata = null;
+
+ // 预处理第一个文档以获取列名
+ // Preprocess the first document to get the column names
+ if (cursor.hasNext()) {
+ Document first = result.first();
+ if (first != null) {
+ columnNames.addAll(first.keySet());
+ this.metadata = new MongoResultSetMetaData(columnNames, first);
+ this.current = first;
+ }
+ }
+ }
+
+ // Move to next row
+ // 移动到下一行
+ @Override
+ public boolean next()
+ throws SQLException
+ {
+ checkClosed();
+
+ if (cursor.hasNext()) {
+ current = cursor.next();
+ return true;
+ }
+ current = null;
+ return false;
+ }
+
+ // Get string value by column name
+ // 通过列名获取字符串值
+ @Override
+ public String getString(String columnLabel)
+ throws SQLException
+ {
+ checkClosed();
+
+ if (current == null) {
+ throw new SQLException("No current row");
+ }
+ Object value = current.get(columnLabel);
+ return value == null ? null : value.toString();
+ }
+
+ @Override
+ public boolean getBoolean(String columnLabel)
+ throws SQLException
+ {
+ checkClosed();
+
+ return current.getBoolean(columnLabel);
+ }
+
+ @Override
+ public byte getByte(String columnLabel)
+ throws SQLException
+ {
+ checkClosed();
+
+ Object value = current.get(columnLabel);
+ if (value == null) {
+ throw new SQLException("Null value");
+ }
+
+ return String.valueOf(value).getBytes(Charset.defaultCharset())[0];
+ }
+
+ @Override
+ public short getShort(String columnLabel)
+ throws SQLException
+ {
+ return 0;
+ }
+
+ // Get integer value by column name
+ // 通过列名获取整数值
+ @Override
+ public int getInt(String columnLabel)
+ throws SQLException
+ {
+ checkClosed();
+
+ return current.getInteger(columnLabel);
+ }
+
+ @Override
+ public long getLong(String columnLabel)
+ throws SQLException
+ {
+ checkClosed();
+
+ return current.getLong(columnLabel);
+ }
+
+ @Override
+ public float getFloat(String columnLabel)
+ throws SQLException
+ {
+ checkClosed();
+
+ Object value = current.get(columnLabel);
+ if (value == null) {
+ throw new SQLException("Null value");
+ }
+ if (value instanceof Number) {
+ return ((Number) value).floatValue();
+ }
+ throw new SQLException("Invalid type for float column");
+ }
+
+ @Override
+ public double getDouble(String columnLabel)
+ throws SQLException
+ {
+ checkClosed();
+
+ return current.getDouble(columnLabel);
+ }
+
+ @Override
+ public BigDecimal getBigDecimal(String columnLabel, int scale)
+ throws SQLException
+ {
+ checkClosed();
+
+ Object value = current.get(columnLabel);
+ if (value == null) {
+ throw new SQLException("Null value");
+ }
+ if (value instanceof Number) {
+ return new BigDecimal(value.toString());
+ }
+ throw new SQLException("Invalid type for BigDecimal column");
+ }
+
+ @Override
+ public byte[] getBytes(String columnLabel)
+ throws SQLException
+ {
+ checkClosed();
+
+ Object value = current.get(columnLabel);
+ if (value == null) {
+ throw new SQLException("Null value");
+ }
+ if (value instanceof byte[]) {
+ return (byte[]) value;
+ }
+ return new byte[0];
+ }
+
+ @Override
+ public Date getDate(String columnLabel)
+ throws SQLException
+ {
+ checkClosed();
+
+ Object value = current.get(columnLabel);
+ if (value == null) {
+ throw new SQLException("Null value");
+ }
+ if (value instanceof Date) {
+ return Date.valueOf(String.valueOf(value));
+ }
+ throw new SQLException("Invalid type for date column");
+ }
+
+ @Override
+ public Time getTime(String columnLabel)
+ throws SQLException
+ {
+ checkClosed();
+
+ Object value = current.get(columnLabel);
+ if (value == null) {
+ throw new SQLException("Null value");
+ }
+ if (value instanceof Time) {
+ return Time.valueOf(String.valueOf(value));
+ }
+ throw new SQLException("Invalid type for time column");
+ }
+
+ @Override
+ public Timestamp getTimestamp(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public InputStream getAsciiStream(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public InputStream getUnicodeStream(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public InputStream getBinaryStream(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public SQLWarning getWarnings()
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public void clearWarnings()
+ throws SQLException
+ {}
+
+ @Override
+ public String getCursorName()
+ throws SQLException
+ {
+ return "";
+ }
+
+ @Override
+ public ResultSetMetaData getMetaData()
+ throws SQLException
+ {
+ checkClosed();
+
+ return metadata;
+ }
+
+ @Override
+ public Object getObject(int columnIndex)
+ throws SQLException
+ {
+ checkClosed();
+
+ return current.get(getColumnName(columnIndex));
+ }
+
+ @Override
+ public Object getObject(String columnLabel)
+ throws SQLException
+ {
+ checkClosed();
+
+ return current.get(columnLabel);
+ }
+
+ @Override
+ public int findColumn(String columnLabel)
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public Reader getCharacterStream(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Reader getCharacterStream(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public BigDecimal getBigDecimal(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public BigDecimal getBigDecimal(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public boolean isBeforeFirst()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public boolean isAfterLast()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public boolean isFirst()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public boolean isLast()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public void beforeFirst()
+ throws SQLException
+ {}
+
+ @Override
+ public void afterLast()
+ throws SQLException
+ {}
+
+ @Override
+ public boolean first()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public boolean last()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public int getRow()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public boolean absolute(int row)
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public boolean relative(int rows)
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public boolean previous()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public void setFetchDirection(int direction)
+ throws SQLException
+ {}
+
+ @Override
+ public int getFetchDirection()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public void setFetchSize(int rows)
+ throws SQLException
+ {}
+
+ @Override
+ public int getFetchSize()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public int getType()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public int getConcurrency()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public boolean rowUpdated()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public boolean rowInserted()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public boolean rowDeleted()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public void updateNull(int columnIndex)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBoolean(int columnIndex, boolean x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateByte(int columnIndex, byte x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateShort(int columnIndex, short x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateInt(int columnIndex, int x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateLong(int columnIndex, long x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateFloat(int columnIndex, float x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateDouble(int columnIndex, double x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBigDecimal(int columnIndex, BigDecimal x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateString(int columnIndex, String x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBytes(int columnIndex, byte[] x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateDate(int columnIndex, Date x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateTime(int columnIndex, Time x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateTimestamp(int columnIndex, Timestamp x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateAsciiStream(int columnIndex, InputStream x, int length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBinaryStream(int columnIndex, InputStream x, int length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateCharacterStream(int columnIndex, Reader x, int length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateObject(int columnIndex, Object x, int scaleOrLength)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateObject(int columnIndex, Object x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateNull(String columnLabel)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBoolean(String columnLabel, boolean x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateByte(String columnLabel, byte x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateShort(String columnLabel, short x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateInt(String columnLabel, int x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateLong(String columnLabel, long x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateFloat(String columnLabel, float x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateDouble(String columnLabel, double x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBigDecimal(String columnLabel, BigDecimal x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateString(String columnLabel, String x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBytes(String columnLabel, byte[] x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateDate(String columnLabel, Date x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateTime(String columnLabel, Time x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateTimestamp(String columnLabel, Timestamp x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateAsciiStream(String columnLabel, InputStream x, int length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBinaryStream(String columnLabel, InputStream x, int length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateCharacterStream(String columnLabel, Reader reader, int length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateObject(String columnLabel, Object x, int scaleOrLength)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateObject(String columnLabel, Object x)
+ throws SQLException
+ {}
+
+ @Override
+ public void insertRow()
+ throws SQLException
+ {}
+
+ @Override
+ public void updateRow()
+ throws SQLException
+ {}
+
+ @Override
+ public void deleteRow()
+ throws SQLException
+ {}
+
+ @Override
+ public void refreshRow()
+ throws SQLException
+ {}
+
+ @Override
+ public void cancelRowUpdates()
+ throws SQLException
+ {}
+
+ @Override
+ public void moveToInsertRow()
+ throws SQLException
+ {}
+
+ @Override
+ public void moveToCurrentRow()
+ throws SQLException
+ {}
+
+ @Override
+ public Statement getStatement()
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Object getObject(int columnIndex, Map<String, Class<?>> map)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Ref getRef(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Blob getBlob(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Clob getClob(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Array getArray(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Object getObject(String columnLabel, Map<String, Class<?>> map)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Ref getRef(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Blob getBlob(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Clob getClob(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Array getArray(String columnLabel)
+ throws SQLException
+ {
+ checkClosed();
+
+ return (Array) current.getList(columnLabel, List.class);
+ }
+
+ @Override
+ public Date getDate(int columnIndex, Calendar cal)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Date getDate(String columnLabel, Calendar cal)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Time getTime(int columnIndex, Calendar cal)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Time getTime(String columnLabel, Calendar cal)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Timestamp getTimestamp(int columnIndex, Calendar cal)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Timestamp getTimestamp(String columnLabel, Calendar cal)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public URL getURL(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public URL getURL(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public void updateRef(int columnIndex, Ref x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateRef(String columnLabel, Ref x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBlob(int columnIndex, Blob x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBlob(String columnLabel, Blob x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateClob(int columnIndex, Clob x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateClob(String columnLabel, Clob x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateArray(int columnIndex, Array x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateArray(String columnLabel, Array x)
+ throws SQLException
+ {}
+
+ @Override
+ public RowId getRowId(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public RowId getRowId(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public void updateRowId(int columnIndex, RowId x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateRowId(String columnLabel, RowId x)
+ throws SQLException
+ {}
+
+ @Override
+ public int getHoldability()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public boolean isClosed()
+ throws SQLException
+ {
+ return isClosed;
+ }
+
+ @Override
+ public void updateNString(int columnIndex, String nString)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateNString(String columnLabel, String nString)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateNClob(int columnIndex, NClob nClob)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateNClob(String columnLabel, NClob nClob)
+ throws SQLException
+ {}
+
+ @Override
+ public NClob getNClob(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public NClob getNClob(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public SQLXML getSQLXML(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public SQLXML getSQLXML(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public void updateSQLXML(int columnIndex, SQLXML xmlObject)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateSQLXML(String columnLabel, SQLXML xmlObject)
+ throws SQLException
+ {}
+
+ @Override
+ public String getNString(int columnIndex)
+ throws SQLException
+ {
+ return "";
+ }
+
+ @Override
+ public String getNString(String columnLabel)
+ throws SQLException
+ {
+ return "";
+ }
+
+ @Override
+ public Reader getNCharacterStream(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Reader getNCharacterStream(String columnLabel)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public void updateNCharacterStream(int columnIndex, Reader x, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateNCharacterStream(String columnLabel, Reader reader, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateAsciiStream(int columnIndex, InputStream x, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBinaryStream(int columnIndex, InputStream x, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateCharacterStream(int columnIndex, Reader x, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateAsciiStream(String columnLabel, InputStream x, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBinaryStream(String columnLabel, InputStream x, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateCharacterStream(String columnLabel, Reader reader, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBlob(int columnIndex, InputStream inputStream, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBlob(String columnLabel, InputStream inputStream, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateClob(int columnIndex, Reader reader, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateClob(String columnLabel, Reader reader, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateNClob(int columnIndex, Reader reader, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateNClob(String columnLabel, Reader reader, long length)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateNCharacterStream(int columnIndex, Reader x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateNCharacterStream(String columnLabel, Reader reader)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateAsciiStream(int columnIndex, InputStream x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBinaryStream(int columnIndex, InputStream x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateCharacterStream(int columnIndex, Reader x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateAsciiStream(String columnLabel, InputStream x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBinaryStream(String columnLabel, InputStream x)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateCharacterStream(String columnLabel, Reader reader)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBlob(int columnIndex, InputStream inputStream)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateBlob(String columnLabel, InputStream inputStream)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateClob(int columnIndex, Reader reader)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateClob(String columnLabel, Reader reader)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateNClob(int columnIndex, Reader reader)
+ throws SQLException
+ {}
+
+ @Override
+ public void updateNClob(String columnLabel, Reader reader)
+ throws SQLException
+ {}
+
+ @Override
+ public <T> T getObject(int columnIndex, Class<T> type)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public <T> T getObject(String columnLabel, Class<T> type)
+ throws SQLException
+ {
+ return null;
+ }
+
+ // Check if result set is closed
+ // 检查结果集是否已关闭
+ private void checkClosed()
+ throws SQLException
+ {
+ if (isClosed) {
+ throw new SQLException("ResultSet is closed");
+ }
+ }
+
+ // Close the result set
+ // 关闭结果集
+ @Override
+ public void close()
+ throws SQLException
+ {
+ if (!isClosed) {
+ cursor.close();
+ isClosed = true;
+ }
+ }
+
+ @Override
+ public boolean wasNull()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public String getString(int columnIndex)
+ throws SQLException
+ {
+ checkClosed();
+
+ String columnName = getColumnName(columnIndex);
+ Object value = current.get(columnName);
+ if (value == null) {
+ return null;
+ }
+
+ return value.toString();
+ }
+
+ @Override
+ public boolean getBoolean(int columnIndex)
+ throws SQLException
+ {
+ checkClosed();
+
+ return current.getBoolean(getColumnName(columnIndex));
+ }
+
+ @Override
+ public byte getByte(int columnIndex)
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public short getShort(int columnIndex)
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public int getInt(int columnIndex)
+ throws SQLException
+ {
+ checkClosed();
+
+ return current.getInteger(getColumnName(columnIndex));
+ }
+
+ @Override
+ public long getLong(int columnIndex)
+ throws SQLException
+ {
+ checkClosed();
+
+ return current.getLong(getColumnName(columnIndex));
+ }
+
+ @Override
+ public float getFloat(int columnIndex)
+ throws SQLException
+ {
+ checkClosed();
+
+ Object value = current.get(getColumnName(columnIndex));
+ if (value == null) {
+ return 0;
+ }
+ if (value instanceof Number) {
+ return Float.parseFloat(value.toString());
+ }
+
+ throw new SQLException("Invalid type for float column");
+ }
+
+ @Override
+ public double getDouble(int columnIndex)
+ throws SQLException
+ {
+ checkClosed();
+
+ return current.getDouble(getColumnName(columnIndex));
+ }
+
+ @Override
+ public BigDecimal getBigDecimal(int columnIndex, int scale)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public byte[] getBytes(int columnIndex)
+ throws SQLException
+ {
+ return new byte[0];
+ }
+
+ @Override
+ public Date getDate(int columnIndex)
+ throws SQLException
+ {
+ checkClosed();
+
+ return Date.valueOf(current.get(getColumnName(columnIndex)).toString());
+ }
+
+ @Override
+ public Time getTime(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public Timestamp getTimestamp(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public InputStream getAsciiStream(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public InputStream getUnicodeStream(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public InputStream getBinaryStream(int columnIndex)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public <T> T unwrap(Class<T> iface)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public boolean isWrapperFor(Class<?> iface)
+ throws SQLException
+ {
+ return false;
+ }
+
+ private String getColumnName(int columnIndex)
+ {
+ if (columnIndex < 1 || columnIndex > columnNames.size()) {
+ throw new IllegalArgumentException("Invalid column index: " + columnIndex);
+ }
+ return columnNames.get(columnIndex - 1);
+ }
+}
diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoResultSetMetaData.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoResultSetMetaData.java
new file mode 100644
index 0000000000..298f116cab
--- /dev/null
+++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoResultSetMetaData.java
@@ -0,0 +1,237 @@
+package io.edurt.datacap.driver;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.bson.Document;
+
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.List;
+import java.util.Map;
+
+@SuppressFBWarnings(value = {"NP_NONNULL_RETURN_VIOLATION", "EI_EXPOSE_REP2"})
+public class MongoResultSetMetaData
+ implements ResultSetMetaData
+{
+ private final List<String> columnNames;
+ private final Map<String, Object> sampleRow;
+
+ // Constructor
+ // 构造函数
+ public MongoResultSetMetaData(List<String> columnNames, Document sampleRow)
+ {
+ this.columnNames = columnNames;
+ this.sampleRow = sampleRow;
+ }
+
+ // Get number of columns
+ // 获取列数
+ @Override
+ public int getColumnCount()
+ throws SQLException
+ {
+ return columnNames.size();
+ }
+
+ // Get column name
+ // 获取列名
+ @Override
+ public String getColumnName(int column)
+ throws SQLException
+ {
+ checkColumnIndex(column);
+ return columnNames.get(column - 1);
+ }
+
+ // Get column label
+ // 获取列标签
+ @Override
+ public String getColumnLabel(int column)
+ throws SQLException
+ {
+ return getColumnName(column);
+ }
+
+ // Get column type
+ // 获取列类型
+ @Override
+ public int getColumnType(int column)
+ throws SQLException
+ {
+ checkColumnIndex(column);
+
+ String columnName = columnNames.get(column - 1);
+ Object value = sampleRow.get(columnName);
+ return MongoTypeHelper.getJdbcType(value);
+ }
+
+ // Get column type name
+ // 获取列类型名称
+ @Override
+ public String getColumnTypeName(int column)
+ throws SQLException
+ {
+ checkColumnIndex(column);
+
+ String columnName = columnNames.get(column - 1);
+ Object value = sampleRow.get(columnName);
+ return MongoTypeHelper.getTypeName(value);
+ }
+
+ // Get column class name
+ // 获取列的Java类名
+ @Override
+ public String getColumnClassName(int column)
+ throws SQLException
+ {
+ checkColumnIndex(column);
+
+ String columnName = columnNames.get(column - 1);
+ Object value = sampleRow.get(columnName);
+ return MongoTypeHelper.getJavaClassName(value);
+ }
+
+ // Check if column is nullable
+ // 检查列是否可为空
+ @Override
+ public int isNullable(int column)
+ throws SQLException
+ {
+ return columnNullable;
+ }
+
+ // Check if column is auto increment
+ // 检查列是否自动递增
+ @Override
+ public boolean isAutoIncrement(int column)
+ throws SQLException
+ {
+ String columnName = getColumnName(column);
+ return columnName.equals("_id");
+ }
+
+ // Check if column is case sensitive
+ // 检查列是否大小写敏感
+ @Override
+ public boolean isCaseSensitive(int column)
+ throws SQLException
+ {
+ return getColumnType(column) == Types.VARCHAR;
+ }
+
+ // Check if column is searchable
+ // 检查列是否可搜索
+ @Override
+ public boolean isSearchable(int column)
+ throws SQLException
+ {
+ return true;
+ }
+
+ // Check if column is currency
+ // 检查列是否货币类型
+ @Override
+ public boolean isCurrency(int column)
+ throws SQLException
+ {
+ return false;
+ }
+
+ // Validate column index
+ // 验证列索引
+ private void checkColumnIndex(int column)
+ throws SQLException
+ {
+ if (column < 1 || column > columnNames.size()) {
+ throw new SQLException("Invalid column index: " + column);
+ }
+ }
+
+ @Override
+ public boolean isSigned(int column)
+ throws SQLException
+ {
+ int type = getColumnType(column);
+ return type == Types.INTEGER || type == Types.BIGINT || type == Types.DOUBLE;
+ }
+
+ @Override
+ public int getColumnDisplaySize(int column)
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public int getPrecision(int column)
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public int getScale(int column)
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public String getTableName(int column)
+ throws SQLException
+ {
+ return "";
+ }
+
+ @Override
+ public String getSchemaName(int column)
+ throws SQLException
+ {
+ return "";
+ }
+
+ @Override
+ public String getCatalogName(int column)
+ throws SQLException
+ {
+ return "";
+ }
+
+ @Override
+ public boolean isReadOnly(int column)
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public boolean isWritable(int column)
+ throws SQLException
+ {
+ return true;
+ }
+
+ @Override
+ public boolean isDefinitelyWritable(int column)
+ throws SQLException
+ {
+ return true;
+ }
+
+ @Override
+ public <T> T unwrap(Class<T> iface)
+ throws SQLException
+ {
+ if (iface.isAssignableFrom(getClass())) {
+ return iface.cast(this);
+ }
+ throw new SQLException("Cannot unwrap to " + iface.getName());
+ }
+
+ @Override
+ public boolean isWrapperFor(Class<?> iface)
+ throws SQLException
+ {
+ return iface.isAssignableFrom(getClass());
+ }
+}
diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoStatement.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoStatement.java
new file mode 100644
index 0000000000..8c9e722860
--- /dev/null
+++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoStatement.java
@@ -0,0 +1,476 @@
+package io.edurt.datacap.driver;
+
+import com.mongodb.client.AggregateIterable;
+import com.mongodb.client.MongoCollection;
+import com.mongodb.client.MongoDatabase;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import io.edurt.datacap.driver.iterable.InMemoryAggregateIterable;
+import io.edurt.datacap.driver.parser.MongoParser;
+import io.edurt.datacap.driver.parser.MongoShowParser;
+import lombok.extern.slf4j.Slf4j;
+import org.bson.Document;
+
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.SQLWarning;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.List;
+
+@Slf4j
+@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2", "NP_NULL_PARAM_DEREF"})
+public class MongoStatement
+ implements Statement
+{
+ private final MongoConnection connection;
+ private boolean isClosed = false;
+
+ // Constructor
+ // 构造函数
+ public MongoStatement(MongoConnection connection)
+ {
+ this.connection = connection;
+ }
+
+ // Execute query and return ResultSet
+ // 执行查询并返回ResultSet
+ @Override
+ public ResultSet executeQuery(String sql)
+ throws SQLException
+ {
+ checkClosed();
+
+ try {
+ // Parse SQL to MongoDB query
+ MongoParser parser = MongoParser.createParser(sql);
+ if (parser instanceof MongoShowParser) {
+ return executeShowStatement((MongoShowParser) parser);
+ }
+
+ Document query = parser.getQuery();
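+ // The parser rewrites SELECT VERSION() into a buildInfo command, so answer it from the server build info
+ // 解析器会将SELECT VERSION()改写为buildInfo命令，因此直接从服务器构建信息中返回结果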
+ if (query.containsKey("buildInfo")) {
+ Document buildInfo = connection.getDatabase()
+ .runCommand(new Document("buildInfo", 1));
+
+ Document versionDoc = new Document();
+ versionDoc.put("version", buildInfo.getString("version"));
+ return new MongoResultSet(new InMemoryAggregateIterable(List.of(versionDoc)));
+ }
+
+ String collectionName = parser.getCollection();
+ log.debug("Executing query: {}", query);
+
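+ // Collection names may be qualified as "database.collection"; switch to that database when a prefix is present
+ // 集合名称可以写成"database.collection"形式；存在前缀时切换到对应数据库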
+ String[] dbAndTb = parser.getCollection().split("\\.");
+ MongoDatabase db = connection.getDatabase();
+ if (dbAndTb.length > 1) {
+ db = connection.getClient().getDatabase(dbAndTb[0]);
+ collectionName = dbAndTb[1];
+ }
+
+ MongoCollection<Document> collection = db.getCollection(collectionName);
+
+ // Execute aggregate command
+ @SuppressWarnings("unchecked")
+ List<Document> pipeline = (List<Document>) query.get("pipeline");
+ AggregateIterable<Document> result = collection.aggregate(pipeline);
+
+ return new MongoResultSet(result);
+ }
+ catch (Exception e) {
+ throw new SQLException("Failed to execute query", e);
+ }
+ }
+
+ private ResultSet executeShowStatement(MongoShowParser parser)
+ throws SQLException
+ {
+ try {
+ switch (parser.getShowType()) {
+ case DATABASES:
+ return handleShowDatabases(parser);
+ case TABLES:
+ return handleShowTables(parser);
+ case COLUMNS:
+ return handleShowColumns(parser);
+ default:
+ throw new SQLException("Unsupported SHOW command type");
+ }
+ }
+ catch (Exception e) {
+ throw new SQLException("Failed to execute SHOW command", e);
+ }
+ }
+
+ private ResultSet handleShowDatabases(MongoShowParser parser)
+ {
+ List<Document> docs = connection.getClient().listDatabaseNames()
+ .map(name -> new Document("name", name))
+ .into(new ArrayList<>());
+ return new MongoResultSet(new InMemoryAggregateIterable(docs));
+ }
+
+ private ResultSet handleShowTables(MongoShowParser parser)
+ {
+ MongoDatabase db = parser.getDatabase() != null ?
+ connection.getClient().getDatabase(parser.getDatabase()) :
+ connection.getDatabase();
+
+ List<Document> docs = db.listCollectionNames()
+ .map(name -> new Document("name", name))
+ .into(new ArrayList<>());
+ return new MongoResultSet(new InMemoryAggregateIterable(docs));
+ }
+
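+ // Infer the column list by sampling a single document from the target collection
+ // 通过从目标集合中采样一个文档来推断列名列表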
+ private ResultSet handleShowColumns(MongoShowParser parser)
+ {
+ String[] dbAndTb = parser.getCollection().split("\\.");
+ String database = parser.getDatabase();
+ String table = parser.getCollection();
+ if (database == null && dbAndTb.length == 2) {
+ database = dbAndTb[0];
+ table = dbAndTb[1];
+ }
+
+ MongoDatabase db = connection.getClient().getDatabase(database);
+
+ Document sample = db.getCollection(table)
+ .find()
+ .limit(1)
+ .first();
+
+ List<Document> docs = new ArrayList<>();
+ if (sample != null) {
+ sample.keySet().forEach(field ->
+ docs.add(new Document("name", field))
+ );
+ }
+ return new MongoResultSet(new InMemoryAggregateIterable(docs));
+ }
+
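+ // Translate a SQL LIKE pattern ("%" wildcard) into a regular expression match
+ // 将SQL LIKE模式（"%"通配符）转换为正则表达式匹配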
+ private boolean matchesPattern(String value, String pattern)
+ {
+ if (pattern == null) {
+ return true;
+ }
+ return value.matches(pattern.replace("%", ".*"));
+ }
+
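+ // Map a BSON value to a human-readable MongoDB type name
+ // 将BSON值映射为可读的MongoDB类型名称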
+ private String getMongoFieldType(Object value)
+ {
+ if (value == null) {
+ return "null";
+ }
+ if (value instanceof String) {
+ return "string";
+ }
+ if (value instanceof Integer) {
+ return "int";
+ }
+ if (value instanceof Long) {
+ return "long";
+ }
+ if (value instanceof Double) {
+ return "double";
+ }
+ if (value instanceof Boolean) {
+ return "boolean";
+ }
+ if (value instanceof Document) {
+ return "document";
+ }
+ if (value instanceof List) {
+ return "array";
+ }
+ return value.getClass().getSimpleName();
+ }
+
+ // Execute update statement
+ // 执行更新语句
+ @Override
+ public int executeUpdate(String sql)
+ throws SQLException
+ {
+ throw new UnsupportedOperationException("Update operation not supported");
+ }
+
+ // Check if statement is closed
+ // 检查语句是否已关闭
+ private void checkClosed()
+ throws SQLException
+ {
+ if (isClosed) {
+ throw new SQLException("Statement is closed");
+ }
+ }
+
+ // Close the statement
+ // 关闭语句
+ @Override
+ public void close()
+ throws SQLException
+ {
+ isClosed = true;
+ }
+
+ @Override
+ public int getMaxFieldSize()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public void setMaxFieldSize(int max)
+ throws SQLException
+ {}
+
+ @Override
+ public int getMaxRows()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public void setMaxRows(int max)
+ throws SQLException
+ {}
+
+ @Override
+ public void setEscapeProcessing(boolean enable)
+ throws SQLException
+ {}
+
+ @Override
+ public int getQueryTimeout()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public void setQueryTimeout(int seconds)
+ throws SQLException
+ {}
+
+ @Override
+ public void cancel()
+ throws SQLException
+ {}
+
+ @Override
+ public SQLWarning getWarnings()
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public void clearWarnings()
+ throws SQLException
+ {}
+
+ @Override
+ public void setCursorName(String name)
+ throws SQLException
+ {}
+
+ @Override
+ public boolean execute(String sql)
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public ResultSet getResultSet()
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public int getUpdateCount()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public boolean getMoreResults()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public void setFetchDirection(int direction)
+ throws SQLException
+ {}
+
+ @Override
+ public int getFetchDirection()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public void setFetchSize(int rows)
+ throws SQLException
+ {}
+
+ @Override
+ public int getFetchSize()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public int getResultSetConcurrency()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public int getResultSetType()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public void addBatch(String sql)
+ throws SQLException
+ {}
+
+ @Override
+ public void clearBatch()
+ throws SQLException
+ {}
+
+ @Override
+ public int[] executeBatch()
+ throws SQLException
+ {
+ return new int[0];
+ }
+
+ @Override
+ public Connection getConnection()
+ throws SQLException
+ {
+ return connection;
+ }
+
+ @Override
+ public boolean getMoreResults(int current)
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public ResultSet getGeneratedKeys()
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public int executeUpdate(String sql, int autoGeneratedKeys)
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public int executeUpdate(String sql, int[] columnIndexes)
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public int executeUpdate(String sql, String[] columnNames)
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public boolean execute(String sql, int autoGeneratedKeys)
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public boolean execute(String sql, int[] columnIndexes)
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public boolean execute(String sql, String[] columnNames)
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public int getResultSetHoldability()
+ throws SQLException
+ {
+ return 0;
+ }
+
+ @Override
+ public boolean isClosed()
+ throws SQLException
+ {
+ return isClosed;
+ }
+
+ @Override
+ public void setPoolable(boolean poolable)
+ throws SQLException
+ {}
+
+ @Override
+ public boolean isPoolable()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public void closeOnCompletion()
+ throws SQLException
+ {}
+
+ @Override
+ public boolean isCloseOnCompletion()
+ throws SQLException
+ {
+ return false;
+ }
+
+ @Override
+ public <T> T unwrap(Class<T> iface)
+ throws SQLException
+ {
+ return null;
+ }
+
+ @Override
+ public boolean isWrapperFor(Class<?> iface)
+ throws SQLException
+ {
+ return false;
+ }
+}
diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoTypeHelper.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoTypeHelper.java
new file mode 100644
index 0000000000..f3ae9e90e6
--- /dev/null
+++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoTypeHelper.java
@@ -0,0 +1,120 @@
+package io.edurt.datacap.driver;
+
+import java.sql.Types;
+import java.util.HashMap;
+import java.util.Map;
+
+public class MongoTypeHelper
+{
+ private static final Map<String, MongoDataType> TYPE_MAP = new HashMap<>();
+
+ private MongoTypeHelper() {}
+
+ // Get MongoDataType by class name
+ // 通过类名获取MongoDataType
+ public static MongoDataType getType(String className)
+ {
+ MongoDataType type = TYPE_MAP.get(className);
+ return type != null ? type : new MongoDataType(Types.OTHER, "OTHER", Object.class);
+ }
+
+ // Get MongoDataType by object
+ // 通过对象获取MongoDataType
+ public static MongoDataType getType(Object value)
+ {
+ if (value == null) {
+ return TYPE_MAP.get("NULL");
+ }
+ return getType(value.getClass().getSimpleName());
+ }
+
+ // Get JDBC type by class name
+ // 通过类名获取JDBC类型
+ public static int getJdbcType(String className)
+ {
+ return getType(className).getJdbcType();
+ }
+
+ // Get JDBC type by object
+ // 通过对象获取JDBC类型
+ public static int getJdbcType(Object value)
+ {
+ return getType(value).getJdbcType();
+ }
+
+ // Get type name by class name
+ // 通过类名获取类型名称
+ public static String getTypeName(String className)
+ {
+ return getType(className).getTypeName();
+ }
+
+ // Get type name by object
+ // 通过对象获取类型名称
+ public static String getTypeName(Object value)
+ {
+ return getType(value).getTypeName();
+ }
+
+ // Get Java class name by class name
+ // 通过类名获取Java类名
+ public static String getJavaClassName(String className)
+ {
+ return getType(className).getJavaClassName();
+ }
+
+ // Get Java class name by object
+ // 通过对象获取Java类名
+ public static String getJavaClassName(Object value)
+ {
+ return getType(value).getJavaClassName();
+ }
+
+ // Data type container class
+ // 数据类型容器类
+ public static class MongoDataType
+ {
+ private final int jdbcType;
+ private final String typeName;
+ private final Class> javaClass;
+
+ public MongoDataType(int jdbcType, String typeName, Class> javaClass)
+ {
+ this.jdbcType = jdbcType;
+ this.typeName = typeName;
+ this.javaClass = javaClass;
+ }
+
+ public int getJdbcType()
+ {
+ return jdbcType;
+ }
+
+ public String getTypeName()
+ {
+ return typeName;
+ }
+
+ public String getJavaClassName()
+ {
+ return javaClass.getName();
+ }
+ }
+
+ static {
+ // Initialize basic types
+ // 初始化基本类型
+ TYPE_MAP.put("String", new MongoDataType(Types.VARCHAR, "VARCHAR", String.class));
+ TYPE_MAP.put("ObjectId", new MongoDataType(Types.VARCHAR, "VARCHAR", String.class));
+ TYPE_MAP.put("Integer", new MongoDataType(Types.INTEGER, "INTEGER", Integer.class));
+ TYPE_MAP.put("Long", new MongoDataType(Types.BIGINT, "BIGINT", Long.class));
+ TYPE_MAP.put("Double", new MongoDataType(Types.DOUBLE, "DOUBLE", Double.class));
+ TYPE_MAP.put("Boolean", new MongoDataType(Types.BOOLEAN, "BOOLEAN", Boolean.class));
+ TYPE_MAP.put("Date", new MongoDataType(Types.TIMESTAMP, "TIMESTAMP", java.sql.Timestamp.class));
+ TYPE_MAP.put("ArrayList", new MongoDataType(Types.ARRAY, "ARRAY", java.util.ArrayList.class));
+ TYPE_MAP.put("Document", new MongoDataType(Types.OTHER, "OBJECT", Object.class));
+ TYPE_MAP.put("Binary", new MongoDataType(Types.BINARY, "BINARY", byte[].class));
+ TYPE_MAP.put("Decimal128", new MongoDataType(Types.DECIMAL, "DECIMAL", java.math.BigDecimal.class));
+ TYPE_MAP.put("NULL", new MongoDataType(Types.NULL, "NULL", Object.class));
+ }
+}
diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/iterable/InMemoryAggregateIterable.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/iterable/InMemoryAggregateIterable.java
new file mode 100644
index 0000000000..b2bc2d98b5
--- /dev/null
+++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/iterable/InMemoryAggregateIterable.java
@@ -0,0 +1,225 @@
+package io.edurt.datacap.driver.iterable;
+
+import com.mongodb.ExplainVerbosity;
+import com.mongodb.Function;
+import com.mongodb.ServerAddress;
+import com.mongodb.ServerCursor;
+import com.mongodb.client.AggregateIterable;
+import com.mongodb.client.MongoCursor;
+import com.mongodb.client.MongoIterable;
+import com.mongodb.client.model.Collation;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.bson.BsonValue;
+import org.bson.Document;
+import org.bson.conversions.Bson;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.NoSuchElementException;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Consumer;
+
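+// An AggregateIterable backed by an in-memory document list, used to expose synthetic results (SHOW commands, buildInfo) through the normal cursor API
+// 基于内存文档列表的AggregateIterable，用于通过常规游标API返回合成结果（SHOW命令、buildInfo）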
+@SuppressFBWarnings(value = {"NP_NONNULL_RETURN_VIOLATION", "EI_EXPOSE_REP2"})
+public class InMemoryAggregateIterable
+ implements AggregateIterable<Document>
+{
+ private final List<Document> documents;
+
+ public InMemoryAggregateIterable(List<Document> documents)
+ {
+ this.documents = documents;
+ }
+
+ @Override
+ public MongoCursor<Document> iterator()
+ {
+ return new InMemoryMongoCursor(documents);
+ }
+
+ @Override
+ public MongoCursor<Document> cursor()
+ {
+ return null;
+ }
+
+ // Interface implementations with minimal implementation
+ @Override
+ public AggregateIterable<Document> batchSize(int size)
+ {
+ return this;
+ }
+
+ @Override
+ public void toCollection()
+ {}
+
+ @Override
+ public AggregateIterable<Document> allowDiskUse(Boolean allowDiskUse)
+ {
+ return this;
+ }
+
+ @Override
+ public AggregateIterable<Document> maxTime(long maxTime, TimeUnit timeUnit)
+ {
+ return this;
+ }
+
+ @Override
+ public AggregateIterable<Document> maxAwaitTime(long maxAwaitTime, TimeUnit timeUnit)
+ {
+ return this;
+ }
+
+ @Override
+ public AggregateIterable<Document> bypassDocumentValidation(Boolean bypassDocumentValidation)
+ {
+ return this;
+ }
+
+ @Override
+ public AggregateIterable<Document> collation(Collation collation)
+ {
+ return this;
+ }
+
+ @Override
+ public AggregateIterable<Document> comment(String comment)
+ {
+ return this;
+ }
+
+ @Override
+ public AggregateIterable<Document> comment(BsonValue bsonValue)
+ {
+ return null;
+ }
+
+ @Override
+ public AggregateIterable<Document> hint(Bson hint)
+ {
+ return this;
+ }
+
+ @Override
+ public AggregateIterable<Document> hintString(String s)
+ {
+ return null;
+ }
+
+ @Override
+ public AggregateIterable<Document> let(Bson bson)
+ {
+ return null;
+ }
+
+ @Override
+ public Document explain()
+ {
+ return null;
+ }
+
+ @Override
+ public Document explain(ExplainVerbosity explainVerbosity)
+ {
+ return null;
+ }
+
+ @Override
+ public <E> E explain(Class<E> aClass)
+ {
+ return null;
+ }
+
+ @Override
+ public <E> E explain(Class<E> aClass, ExplainVerbosity explainVerbosity)
+ {
+ return null;
+ }
+
+ @Override
+ public void forEach(Consumer<? super Document> action)
+ {
+ MongoCursor<Document> cursor = iterator();
+ while (cursor.hasNext()) {
+ action.accept(cursor.next());
+ }
+ }
+
+ @Override
+ public <A extends Collection<? super Document>> A into(A target)
+ {
+ forEach(target::add);
+ return target;
+ }
+
+ @Override
+ public Document first()
+ {
+ MongoCursor<Document> cursor = iterator();
+ return cursor.hasNext() ? cursor.next() : null;
+ }
+
+ @Override
+ public <U> MongoIterable<U> map(Function<Document, U> mapper)
+ {
+ throw new UnsupportedOperationException("Map operation not supported");
+ }
+
+ private static class InMemoryMongoCursor
+ implements MongoCursor<Document>
+ {
+ private final List<Document> results;
+ private int position = 0;
+
+ public InMemoryMongoCursor(List<Document> results)
+ {
+ this.results = results;
+ }
+
+ @Override
+ public void close()
+ {
+ // No resources to close
+ }
+
+ @Override
+ public boolean hasNext()
+ {
+ return position < results.size();
+ }
+
+ @Override
+ public Document next()
+ {
+ if (!hasNext()) {
+ throw new NoSuchElementException();
+ }
+ return results.get(position++);
+ }
+
+ @Override
+ public int available()
+ {
+ return 0;
+ }
+
+ @Override
+ public Document tryNext()
+ {
+ return hasNext() ? next() : null;
+ }
+
+ @Override
+ public ServerCursor getServerCursor()
+ {
+ return null;
+ }
+
+ @Override
+ public ServerAddress getServerAddress()
+ {
+ return null;
+ }
+ }
+}
diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/iterable/InMemoryFindIterable.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/iterable/InMemoryFindIterable.java
new file mode 100644
index 0000000000..4d8742be40
--- /dev/null
+++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/iterable/InMemoryFindIterable.java
@@ -0,0 +1,276 @@
+package io.edurt.datacap.driver.iterable;
+
+import com.mongodb.CursorType;
+import com.mongodb.ExplainVerbosity;
+import com.mongodb.Function;
+import com.mongodb.ServerAddress;
+import com.mongodb.ServerCursor;
+import com.mongodb.client.FindIterable;
+import com.mongodb.client.MongoCursor;
+import com.mongodb.client.MongoIterable;
+import com.mongodb.client.model.Collation;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.bson.BsonValue;
+import org.bson.Document;
+import org.bson.conversions.Bson;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
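+// A FindIterable backed by an in-memory document list; query modifiers such as filter, limit and skip are accepted but not applied
+// 基于内存文档列表的FindIterable；filter、limit、skip等查询修饰符会被接受但不会生效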
+@SuppressFBWarnings(value = {"NP_NONNULL_RETURN_VIOLATION", "EI_EXPOSE_REP2"})
+public class InMemoryFindIterable
+ implements FindIterable<Document>
+{
+ private final List<Document> documents;
+
+ public InMemoryFindIterable(List<Document> documents)
+ {
+ this.documents = documents;
+ }
+
+ @Override
+ public MongoCursor<Document> iterator()
+ {
+ return new InMemoryMongoCursor(documents);
+ }
+
+ @Override
+ public MongoCursor<Document> cursor()
+ {
+ return null;
+ }
+
+ @Override
+ public Document first()
+ {
+ return documents.isEmpty() ? null : documents.get(0);
+ }
+
+ @Override
+ public <U> MongoIterable<U> map(Function<Document, U> function)
+ {
+ return null;
+ }
+
+ @Override
+ public <A extends Collection<? super Document>> A into(A objects)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> filter(Bson filter)
+ {
+ return this;
+ }
+
+ @Override
+ public FindIterable<Document> limit(int limit)
+ {
+ return this;
+ }
+
+ @Override
+ public FindIterable<Document> skip(int skip)
+ {
+ return this;
+ }
+
+ @Override
+ public FindIterable<Document> maxTime(long maxTime, TimeUnit timeUnit)
+ {
+ return this;
+ }
+
+ @Override
+ public FindIterable<Document> maxAwaitTime(long l, TimeUnit timeUnit)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> projection(Bson bson)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> sort(Bson bson)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> noCursorTimeout(boolean b)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> oplogReplay(boolean b)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> partial(boolean b)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> cursorType(CursorType cursorType)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> batchSize(int batchSize)
+ {
+ return this;
+ }
+
+ @Override
+ public FindIterable<Document> collation(Collation collation)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> comment(String s)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> comment(BsonValue bsonValue)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> hint(Bson bson)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> hintString(String s)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> let(Bson bson)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> max(Bson bson)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> min(Bson bson)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> returnKey(boolean b)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> showRecordId(boolean b)
+ {
+ return null;
+ }
+
+ @Override
+ public FindIterable<Document> allowDiskUse(Boolean aBoolean)
+ {
+ return null;
+ }
+
+ @Override
+ public Document explain()
+ {
+ return null;
+ }
+
+ @Override
+ public Document explain(ExplainVerbosity explainVerbosity)
+ {
+ return null;
+ }
+
+ @Override
+ public <E> E explain(Class<E> aClass)
+ {
+ return null;
+ }
+
+ @Override
+ public <E> E explain(Class<E> aClass, ExplainVerbosity explainVerbosity)
+ {
+ return null;
+ }
+
+ private static class InMemoryMongoCursor
+ implements MongoCursor<Document>
+ {
+ private final List<Document> documents;
+ private int position = 0;
+
+ public InMemoryMongoCursor(List<Document> documents)
+ {
+ this.documents = documents;
+ }
+
+ @Override
+ public void close()
+ {
+ }
+
+ @Override
+ public boolean hasNext()
+ {
+ return position < documents.size();
+ }
+
+ @Override
+ public Document next()
+ {
+ return documents.get(position++);
+ }
+
+ @Override
+ public int available()
+ {
+ return 0;
+ }
+
+ @Override
+ public Document tryNext()
+ {
+ return hasNext() ? next() : null;
+ }
+
+ @Override
+ public ServerCursor getServerCursor()
+ {
+ return null;
+ }
+
+ @Override
+ public ServerAddress getServerAddress()
+ {
+ return null;
+ }
+ }
+}
diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoParser.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoParser.java
new file mode 100644
index 0000000000..dc49e970ac
--- /dev/null
+++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoParser.java
@@ -0,0 +1,45 @@
+package io.edurt.datacap.driver.parser;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import io.edurt.datacap.sql.SQLParser;
+import io.edurt.datacap.sql.statement.SQLStatement;
+import io.edurt.datacap.sql.statement.SelectStatement;
+import io.edurt.datacap.sql.statement.ShowStatement;
+import lombok.Getter;
+import lombok.Setter;
+import org.bson.Document;
+
+import java.util.List;
+
+@Getter
+@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"})
+public class MongoParser
+{
+ protected Document filter;
+ protected List<String> fields;
+ protected String command;
+ protected Document query;
+ protected String collection;
+ protected ShowStatement.ShowType showType;
+
+ @Setter
+ protected String database;
+
+ // Parse SQL statement
+ // 解析SQL语句
+ public static MongoParser createParser(String sql)
+ {
+ if (sql == null || sql.trim().isEmpty()) {
+ throw new IllegalArgumentException("SQL query cannot be null or empty");
+ }
+
+ SQLStatement statement = SQLParser.parse(sql.trim());
+ if (statement instanceof SelectStatement) {
+ return new MongoSelectParser((SelectStatement) statement);
+ }
+ else if (statement instanceof ShowStatement) {
+ return new MongoShowParser((ShowStatement) statement);
+ }
+ throw new IllegalArgumentException("Unsupported SQL operation: " + sql);
+ }
+}
diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoSelectParser.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoSelectParser.java
new file mode 100644
index 0000000000..6349a4daa7
--- /dev/null
+++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoSelectParser.java
@@ -0,0 +1,368 @@
+package io.edurt.datacap.driver.parser;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import io.edurt.datacap.sql.node.Expression;
+import io.edurt.datacap.sql.node.clause.LimitClause;
+import io.edurt.datacap.sql.node.element.OrderByElement;
+import io.edurt.datacap.sql.node.element.SelectElement;
+import io.edurt.datacap.sql.node.element.TableElement;
+import io.edurt.datacap.sql.statement.SelectStatement;
+import lombok.Getter;
+import org.bson.Document;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Pattern;
+
+@Getter
+@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2", "CT_CONSTRUCTOR_THROW"})
+public class MongoSelectParser
+ extends MongoParser
+{
+ private final Map<String, String> fieldAliasMap = new HashMap<>();
+ private final Map<String, String> aliasToFieldMap = new HashMap<>();
+
+ public MongoSelectParser(SelectStatement statement)
+ {
+ parseSelectStatement(statement);
+ }
+
+ // Parse SELECT statement
+ // 解析SELECT语句
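+ // For example, "SELECT name FROM users WHERE age > 18 ORDER BY name LIMIT 10" becomes a pipeline of $match, $project, $sort and $limit stages
+ // 例如，"SELECT name FROM users WHERE age > 18 ORDER BY name LIMIT 10"会被转换为由$match、$project、$sort和$limit阶段组成的管道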
+ public void parseSelectStatement(SelectStatement select)
+ {
+ List<SelectElement> elements = select.getSelectElements();
+ if (elements != null && elements.size() == 1) {
+ SelectElement element = elements.get(0);
+ if (element.getExpression() != null &&
+ element.getExpression().getType() == Expression.ExpressionType.FUNCTION &&
+ "VERSION".equalsIgnoreCase(element.getExpression().getValue().toString())) {
+ this.query = new Document("buildInfo", 1);
+ return;
+ }
+ }
+
+ // Get collection name first
+ parseFromClause(select.getFromSources());
+
+ // Parse select elements to set fields
+ parseSelectElements(select.getSelectElements());
+
+ // Initialize an aggregation pipeline
+ List<Document> pipeline = new ArrayList<>();
+
+ // Add $match stage for WHERE conditions
+ if (select.getWhereClause() != null) {
+ Object queryResult = parseExpression(select.getWhereClause());
+ Document matchStage = new Document("$match",
+ queryResult instanceof Document ? queryResult : new Document("$eq", queryResult));
+ pipeline.add(matchStage);
+ }
+
+ // Add $project stage for field selection
+ // Add $group stage if GROUP BY exists
+ if (select.getGroupByElements() != null && !select.getGroupByElements().isEmpty()) {
+ Document groupStage = parseGroupByClause(select.getGroupByElements(), select.getSelectElements());
+ pipeline.add(new Document("$group", groupStage));
+ }
+ // If no GROUP BY, add normal $project stage
+ else if (fields != null && !fields.isEmpty() &&
+ !(fields.size() == 1 && fields.get(0).equals("*"))) {
+ Document projectStage = new Document();
+ projectStage.put("_id", 0);
+
+ // Create field mappings in $project stage
+ for (SelectElement element : select.getSelectElements()) {
+ String originalField = element.getColumn() != null ?
+ element.getColumn() :
+ element.getExpression().getValue().toString();
+
+ String alias = element.getAlias();
+ if (alias != null) {
+ projectStage.put(alias, "$" + originalField);
+ }
+ else {
+ projectStage.put(originalField, 1);
+ }
+ }
+ pipeline.add(new Document("$project", projectStage));
+ }
+
+ // Add $sort stage if ORDER BY exists
+ if (select.getOrderByElements() != null && !select.getOrderByElements().isEmpty()) {
+ Document sortStage = new Document("$sort", parseOrderByElements(select.getOrderByElements()));
+ pipeline.add(sortStage);
+ }
+
+ // Add $skip and $limit stages if present
+ LimitClause limitClause = select.getLimitClause();
+ if (limitClause != null) {
+ if (limitClause.getOffset() > 0) {
+ pipeline.add(new Document("$skip", (int) limitClause.getOffset()));
+ }
+ if (limitClause.getLimit() >= 0) {
+ pipeline.add(new Document("$limit", (int) limitClause.getLimit()));
+ }
+ }
+
+ // Set the final query
+ this.query = new Document("aggregate", this.collection)
+ .append("pipeline", pipeline)
+ .append("cursor", new Document());
+ }
+
+ // Parse SELECT elements to field list
+ // 解析SELECT元素到字段列表
+ private void parseSelectElements(List<SelectElement> elements)
+ {
+ this.fields = new ArrayList<>();
+ if (elements != null) {
+ for (SelectElement element : elements) {
+ String field;
+ // Get field name (from column name or expression)
+ if (element.getColumn() != null) {
+ field = element.getColumn();
+ }
+ else if (element.getExpression() != null) {
+ Expression expr = element.getExpression();
+ if (expr.getType() == Expression.ExpressionType.FUNCTION &&
+ "VERSION".equalsIgnoreCase(expr.getValue().toString())) {
+ field = "version";
+ }
+ else {
+ field = parseExpression(expr).toString();
+ }
+ }
+ else {
+ continue;
+ }
+
+ // Handle alias mapping
+ if (element.getAlias() != null) {
+ fieldAliasMap.put(field, element.getAlias());
+ aliasToFieldMap.put(element.getAlias(), field);
+ fields.add(element.getAlias());
+ }
+ else {
+ fields.add(field);
+ }
+ }
+ }
+ }
+
+ // Parse FROM clause to get collection name
+ // 解析FROM子句获取集合名称
+ private void parseFromClause(List<TableElement> fromSources)
+ {
+ if (fromSources != null && !fromSources.isEmpty()) {
+ TableElement mainTable = fromSources.get(0);
+ this.collection = mainTable.getTableName();
+
+ // MongoDB doesn't support JOINs
+ // MongoDB不支持JOIN操作
+ if (mainTable.getJoins() != null && !mainTable.getJoins().isEmpty()) {
+ throw new IllegalArgumentException("MongoDB does not support JOIN operations");
+ }
+ }
+ }
+
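+ // Recursively convert a SQL expression tree into the equivalent MongoDB filter document
+ // 递归地将SQL表达式树转换为等效的MongoDB过滤文档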
+ private Object parseExpression(Expression expr)
+ {
+ if (expr == null) {
+ return null;
+ }
+
+ switch (expr.getType()) {
+ case LITERAL:
+ return parseValue(expr.getValue().toString());
+
+ case COLUMN_REFERENCE:
+ return expr.getValue().toString();
+
+ case BINARY_OP:
+ String operator = expr.getValue().toString();
+ List<Expression> children = expr.getChildren();
+
+ // Handle logical operators (AND, OR)
+ if ("AND".equalsIgnoreCase(operator) || "OR".equalsIgnoreCase(operator)) {
+ List<Document> conditions = new ArrayList<>();
+ for (Expression child : children) {
+ Object result = parseExpression(child);
+ if (result instanceof Document) {
+ conditions.add((Document) result);
+ }
+ }
+ return new Document(operator.equalsIgnoreCase("AND") ? "$and" : "$or", conditions);
+ }
+
+ // Handle comparison operators
+ if (children != null && children.size() == 2) {
+ Expression left = children.get(0);
+ Expression right = children.get(1);
+
+ String field = parseExpression(left).toString();
+ Object value = parseExpression(right);
+
+ Document condition = new Document();
+ switch (operator) {
+ case "=":
+ condition.put(field, value);
+ break;
+ case ">":
+ condition.put(field, new Document("$gt", value));
+ break;
+ case "<":
+ condition.put(field, new Document("$lt", value));
+ break;
+ case ">=":
+ condition.put(field, new Document("$gte", value));
+ break;
+ case "<=":
+ condition.put(field, new Document("$lte", value));
+ break;
+ case "!=":
+ condition.put(field, new Document("$ne", value));
+ break;
+ case "LIKE":
+ String pattern = value.toString().replace("%", ".*");
+ condition.put(field, Pattern.compile(pattern, Pattern.CASE_INSENSITIVE));
+ break;
+ case "IN":
+ condition.put(field, new Document("$in", value));
+ break;
+ default:
+ throw new IllegalArgumentException("Unsupported operator: " + operator);
+ }
+ return condition;
+ }
+
+ throw new IllegalArgumentException("Invalid binary expression structure");
+
+ case FUNCTION:
+ if ("VERSION".equalsIgnoreCase(expr.getValue().toString())) {
+ return new Document("$buildInfo", 1);
+ }
+
+ throw new IllegalArgumentException("Unsupported function: " + expr.getValue());
+
+ default:
+ throw new IllegalArgumentException("Unsupported expression type: " + expr.getType());
+ }
+ }
+
+ // Parse ORDER BY elements to MongoDB sort document
+ private Document parseOrderByElements(List<OrderByElement> elements)
+ {
+ Document orderBy = new Document();
+ for (OrderByElement element : elements) {
+ String field = element.getExpression().getValue().toString();
+ orderBy.put(field, element.isAscending() ? 1 : -1);
+ }
+ return orderBy;
+ }
+
+ // Parse string value to appropriate type
+ private Object parseValue(String value)
+ {
+ value = value.trim();
+
+ // Remove quotes if present
+ if (value.startsWith("'") && value.endsWith("'")) {
+ return value.substring(1, value.length() - 1);
+ }
+
+ // Try parsing as number
+ try {
+ if (value.contains(".")) {
+ return Double.parseDouble(value);
+ }
+ else {
+ return Long.parseLong(value);
+ }
+ }
+ catch (NumberFormatException e) {
+ // Return as string if not a number
+ return value;
+ }
+ }
+
+ private Document parseGroupByClause(List<Expression> groupByColumns, List<SelectElement> selectElements)
+ {
+ Document groupStage = new Document();
+
+ // Handle _id field for grouping
+ if (groupByColumns.size() == 1 && groupByColumns.get(0).getValue().equals("_id")) {
+ groupStage.put("_id", "$" + groupByColumns.get(0).getValue());
+ }
+ else {
+ // Multiple group by columns
+ Document idDoc = new Document();
+ for (Expression expr : groupByColumns) {
+ String field = expr.getValue().toString();
+ idDoc.put(field, "$" + field);
+ }
+ groupStage.put("_id", idDoc);
+ }
+
+ // Handle aggregation functions in SELECT clause
+ for (SelectElement element : selectElements) {
+ if (element.getExpression() != null) {
+ Expression expr = element.getExpression();
+ if (expr.getType() == Expression.ExpressionType.FUNCTION) {
+ String functionName = expr.getValue().toString().toUpperCase();
+ String field = expr.getChildren().get(0).getValue().toString();
+ String alias = element.getAlias() != null ? element.getAlias() : functionName + "_" + field;
+
+ switch (functionName) {
+ case "COUNT":
+ groupStage.put(alias, new Document("$sum", 1));
+ break;
+ case "SUM":
+ groupStage.put(alias, new Document("$sum", "$" + field));
+ break;
+ case "AVG":
+ groupStage.put(alias, new Document("$avg", "$" + field));
+ break;
+ case "MIN":
+ groupStage.put(alias, new Document("$min", "$" + field));
+ break;
+ case "MAX":
+ groupStage.put(alias, new Document("$max", "$" + field));
+ break;
+ default:
+ throw new IllegalArgumentException("Unsupported aggregation function: " + functionName);
+ }
+ }
+ else {
+ // Handle non-aggregated fields that are part of GROUP BY
+ String field = expr.getValue().toString();
+ if (isFieldInGroupBy(field, groupByColumns)) {
+ groupStage.put(field, new Document("$first", "$" + field));
+ }
+ }
+ }
+ else if (element.getColumn() != null) {
+ // Handle simple columns that are part of GROUP BY
+ String field = element.getColumn();
+ if (isFieldInGroupBy(field, groupByColumns)) {
+ groupStage.put(field, new Document("$first", "$" + field));
+ }
+ }
+ }
+
+ return groupStage;
+ }
+
+ private boolean isFieldInGroupBy(String field, List<Expression> groupByColumns)
+ {
+ return groupByColumns.stream()
+ .anyMatch(expr -> expr.getValue().toString().equals(field));
+ }
+}
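For orientation only (not part of the patch): a minimal, self-contained sketch of the aggregate command the parser above assembles. The aggregate/pipeline/cursor wrapper and the $skip/$limit stages mirror the code shown; the harness class, the sample SQL, the $match/$sort stages, and the field names are illustrative assumptions.

// Illustrative sketch: roughly what this.query would hold for
// "SELECT name FROM users WHERE age >= 18 ORDER BY age DESC LIMIT 5 OFFSET 10",
// assuming the earlier parts of the parser contribute the $match and $sort stages.
import org.bson.Document;

import java.util.Arrays;
import java.util.List;

public class MongoQueryParserSketch
{
    public static void main(String[] args)
    {
        List<Document> pipeline = Arrays.asList(
                new Document("$match", new Document("age", new Document("$gte", 18L))),
                new Document("$sort", new Document("age", -1)),
                new Document("$skip", 10),
                new Document("$limit", 5));

        Document query = new Document("aggregate", "users")
                .append("pipeline", pipeline)
                .append("cursor", new Document());

        // Prints the command as JSON, e.g. {"aggregate": "users", "pipeline": [...], "cursor": {}}
        System.out.println(query.toJson());
    }
}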
diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoShowParser.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoShowParser.java
new file mode 100644
index 0000000000..1619464ca4
--- /dev/null
+++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoShowParser.java
@@ -0,0 +1,76 @@
+package io.edurt.datacap.driver.parser;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import io.edurt.datacap.sql.statement.ShowStatement;
+import lombok.Getter;
+import org.bson.Document;
+
+@Getter
+@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2", "CT_CONSTRUCTOR_THROW"})
+public class MongoShowParser
+ extends MongoParser
+{
+ public MongoShowParser(ShowStatement statement)
+ {
+ parseShowStatement(statement);
+ }
+
+ public void parseShowStatement(ShowStatement show)
+ {
+ this.showType = show.getShowType();
+ switch (show.getShowType()) {
+ case DATABASES:
+ this.command = "listDatabases";
+ if (show.getPattern() != null) {
+ // Convert SQL LIKE pattern to MongoDB regex pattern
+ String pattern = convertLikeToRegex(show.getPattern());
+ this.filter = new Document("name", new Document("$regex", pattern));
+ }
+ break;
+
+ case TABLES:
+ this.command = "listCollections";
+ if (show.getDatabaseName() != null) {
+ this.database = show.getDatabaseName();
+ }
+ if (show.getPattern() != null) {
+ String pattern = convertLikeToRegex(show.getPattern());
+ this.filter = new Document("name", new Document("$regex", pattern));
+ }
+ break;
+
+ case COLUMNS:
+ this.command = "listFields";
+ if (show.getDatabaseName() != null) {
+ this.database = show.getDatabaseName();
+ }
+ if (show.getTableName() != null) {
+ this.collection = show.getTableName();
+ }
+ if (show.getPattern() != null) {
+ String pattern = convertLikeToRegex(show.getPattern());
+ this.filter = new Document("name", new Document("$regex", pattern));
+ }
+ break;
+
+ default:
+ throw new IllegalArgumentException("Unsupported SHOW type: " + show.getShowType());
+ }
+ }
+
+ private String convertLikeToRegex(String likePattern)
+ {
+ // Remove quotes if present
+ if (likePattern.startsWith("'") && likePattern.endsWith("'")) {
+ likePattern = likePattern.substring(1, likePattern.length() - 1);
+ }
+ else if (likePattern.startsWith("\"") && likePattern.endsWith("\"")) {
+ likePattern = likePattern.substring(1, likePattern.length() - 1);
+ }
+
+ // Convert SQL LIKE pattern to MongoDB regex pattern
+ return likePattern
+ .replace("%", ".*") // % matches any sequence of characters
+ .replace("_", "."); // _ matches any single character
+ }
+}
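Purely illustrative (the runCommand document shape is an assumption about how a caller would consume the parsed command and filter, not part of the patch): SHOW TABLES LIKE 'user_%' is parsed above into command = "listCollections" plus a regex filter, which could be sent to MongoDB roughly as follows.

import org.bson.Document;

public class MongoShowParserSketch
{
    public static void main(String[] args)
    {
        // convertLikeToRegex above: 'user_%' -> "user..*"  (% -> .* , _ -> .)
        String regex = "user_%".replace("%", ".*").replace("_", ".");
        Document filter = new Document("name", new Document("$regex", regex));

        // Hypothetical command a caller might run against the selected database
        Document command = new Document("listCollections", 1).append("filter", filter);
        System.out.println(command.toJson());
    }
}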
diff --git a/driver/datacap-driver-mongodb/src/main/resources/META-INF/services/java.sql.Driver b/driver/datacap-driver-mongodb/src/main/resources/META-INF/services/java.sql.Driver
new file mode 100644
index 0000000000..c2e0372e5b
--- /dev/null
+++ b/driver/datacap-driver-mongodb/src/main/resources/META-INF/services/java.sql.Driver
@@ -0,0 +1,2 @@
+io.edurt.datacap.driver.MongoJdbcDriver
+com.dbschema.MongoJdbcDriver
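Because both driver classes are registered through META-INF/services, DriverManager can discover the MongoDB JDBC driver without an explicit Class.forName call. A hedged usage sketch follows; the JDBC URL scheme and the SHOW TABLES support are assumptions to be verified against MongoJdbcDriver.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class MongoJdbcDriverExample
{
    public static void main(String[] args) throws Exception
    {
        // URL format is assumed for illustration; the registered driver decides what it accepts
        try (Connection connection = DriverManager.getConnection("jdbc:mongodb://localhost:27017/test");
                Statement statement = connection.createStatement();
                ResultSet resultSet = statement.executeQuery("SHOW TABLES")) {
            while (resultSet.next()) {
                System.out.println(resultSet.getString(1));
            }
        }
    }
}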
diff --git a/driver/datacap-driver-redis/pom.xml b/driver/datacap-driver-redis/pom.xml
index e9e28557c8..097cbe64f6 100644
--- a/driver/datacap-driver-redis/pom.xml
+++ b/driver/datacap-driver-redis/pom.xml
@@ -6,7 +6,7 @@
<artifactId>datacap</artifactId>
<groupId>io.edurt.datacap</groupId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/executor/datacap-executor-local/pom.xml b/executor/datacap-executor-local/pom.xml
index 10abf20930..84b455dbfb 100644
--- a/executor/datacap-executor-local/pom.xml
+++ b/executor/datacap-executor-local/pom.xml
@@ -4,7 +4,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/executor/datacap-executor-seatunnel/pom.xml b/executor/datacap-executor-seatunnel/pom.xml
index bd433e705b..c4c01bf981 100644
--- a/executor/datacap-executor-seatunnel/pom.xml
+++ b/executor/datacap-executor-seatunnel/pom.xml
@@ -6,7 +6,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/executor/datacap-executor-spi/pom.xml b/executor/datacap-executor-spi/pom.xml
index d8828f1e96..0edbf1a85b 100644
--- a/executor/datacap-executor-spi/pom.xml
+++ b/executor/datacap-executor-spi/pom.xml
@@ -4,7 +4,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/fs/datacap-fs-alioss/pom.xml b/fs/datacap-fs-alioss/pom.xml
index 767f518da2..1d50110e00 100644
--- a/fs/datacap-fs-alioss/pom.xml
+++ b/fs/datacap-fs-alioss/pom.xml
@@ -6,7 +6,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/fs/datacap-fs-amazon-s3/pom.xml b/fs/datacap-fs-amazon-s3/pom.xml
index d58400015d..b4feb1356f 100644
--- a/fs/datacap-fs-amazon-s3/pom.xml
+++ b/fs/datacap-fs-amazon-s3/pom.xml
@@ -6,7 +6,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/fs/datacap-fs-local/pom.xml b/fs/datacap-fs-local/pom.xml
index 3e849f9c07..aa1ba41fe3 100644
--- a/fs/datacap-fs-local/pom.xml
+++ b/fs/datacap-fs-local/pom.xml
@@ -3,7 +3,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
<modelVersion>4.0.0</modelVersion>
diff --git a/fs/datacap-fs-minio/pom.xml b/fs/datacap-fs-minio/pom.xml
index ebfef8fec9..42b9dc5dbd 100644
--- a/fs/datacap-fs-minio/pom.xml
+++ b/fs/datacap-fs-minio/pom.xml
@@ -6,7 +6,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/fs/datacap-fs-qiniu/pom.xml b/fs/datacap-fs-qiniu/pom.xml
index a8c98f3a22..c9416e484d 100644
--- a/fs/datacap-fs-qiniu/pom.xml
+++ b/fs/datacap-fs-qiniu/pom.xml
@@ -4,7 +4,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/fs/datacap-fs-spi/pom.xml b/fs/datacap-fs-spi/pom.xml
index af5984d2a1..a5850c1ca7 100644
--- a/fs/datacap-fs-spi/pom.xml
+++ b/fs/datacap-fs-spi/pom.xml
@@ -3,7 +3,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
<modelVersion>4.0.0</modelVersion>
diff --git a/fs/datacap-fs-tencent-cos/pom.xml b/fs/datacap-fs-tencent-cos/pom.xml
index 7d26ca6e48..fbc58e624d 100644
--- a/fs/datacap-fs-tencent-cos/pom.xml
+++ b/fs/datacap-fs-tencent-cos/pom.xml
@@ -6,7 +6,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/lib/datacap-http/pom.xml b/lib/datacap-http/pom.xml
index 73e745f396..4d2664a4c1 100644
--- a/lib/datacap-http/pom.xml
+++ b/lib/datacap-http/pom.xml
@@ -6,7 +6,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/lib/datacap-logger/pom.xml b/lib/datacap-logger/pom.xml
index 595ec05d35..871f589fd0 100644
--- a/lib/datacap-logger/pom.xml
+++ b/lib/datacap-logger/pom.xml
@@ -6,7 +6,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/lib/datacap-schedule/pom.xml b/lib/datacap-schedule/pom.xml
index 9f0f500e71..d6d577078f 100644
--- a/lib/datacap-schedule/pom.xml
+++ b/lib/datacap-schedule/pom.xml
@@ -6,7 +6,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/lib/datacap-shell/pom.xml b/lib/datacap-shell/pom.xml
index 3bb03912bf..4700590da1 100644
--- a/lib/datacap-shell/pom.xml
+++ b/lib/datacap-shell/pom.xml
@@ -6,7 +6,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/notify/datacap-notify-dingtalk/pom.xml b/notify/datacap-notify-dingtalk/pom.xml
index 1e7f5c743f..b13836e147 100644
--- a/notify/datacap-notify-dingtalk/pom.xml
+++ b/notify/datacap-notify-dingtalk/pom.xml
@@ -5,7 +5,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
<modelVersion>4.0.0</modelVersion>
diff --git a/notify/datacap-notify-spi/pom.xml b/notify/datacap-notify-spi/pom.xml
index a1205e2b49..b217779a68 100644
--- a/notify/datacap-notify-spi/pom.xml
+++ b/notify/datacap-notify-spi/pom.xml
@@ -5,7 +5,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
<modelVersion>4.0.0</modelVersion>
diff --git a/parser/datacap-parser-mysql/pom.xml b/parser/datacap-parser-mysql/pom.xml
index 55c4d038c0..fe1eb9c24a 100644
--- a/parser/datacap-parser-mysql/pom.xml
+++ b/parser/datacap-parser-mysql/pom.xml
@@ -4,7 +4,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/parser/datacap-parser-spi/pom.xml b/parser/datacap-parser-spi/pom.xml
index e6db254e90..22729ab176 100644
--- a/parser/datacap-parser-spi/pom.xml
+++ b/parser/datacap-parser-spi/pom.xml
@@ -4,7 +4,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/parser/datacap-parser-trino/pom.xml b/parser/datacap-parser-trino/pom.xml
index 12a596c0aa..8b2f43afc3 100644
--- a/parser/datacap-parser-trino/pom.xml
+++ b/parser/datacap-parser-trino/pom.xml
@@ -4,7 +4,7 @@
<groupId>io.edurt.datacap</groupId>
<artifactId>datacap</artifactId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/plugin/datacap-plugin-alioss/pom.xml b/plugin/datacap-plugin-alioss/pom.xml
index 0adcf56993..f83665a5c5 100644
--- a/plugin/datacap-plugin-alioss/pom.xml
+++ b/plugin/datacap-plugin-alioss/pom.xml
@@ -6,7 +6,7 @@
<artifactId>datacap</artifactId>
<groupId>io.edurt.datacap</groupId>
- <version>2024.4.0</version>
+ <version>2024.4.1-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
diff --git a/plugin/datacap-plugin-alioss/src/main/java/io/edurt/datacap/plugin/natived/alioss/AliossAdapter.java b/plugin/datacap-plugin-alioss/src/main/java/io/edurt/datacap/plugin/natived/alioss/AliossAdapter.java
index f60df371e0..42739f71a4 100644
--- a/plugin/datacap-plugin-alioss/src/main/java/io/edurt/datacap/plugin/natived/alioss/AliossAdapter.java
+++ b/plugin/datacap-plugin-alioss/src/main/java/io/edurt/datacap/plugin/natived/alioss/AliossAdapter.java
@@ -2,20 +2,20 @@
import com.aliyun.oss.OSS;
import com.aliyun.oss.model.ObjectListing;
-import com.google.common.base.Preconditions;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import io.edurt.datacap.spi.adapter.NativeAdapter;
import io.edurt.datacap.spi.model.Configure;
import io.edurt.datacap.spi.model.Response;
import io.edurt.datacap.spi.model.Time;
-import io.edurt.datacap.sql.SqlBase;
+import io.edurt.datacap.sql.node.element.SelectElement;
+import io.edurt.datacap.sql.statement.SelectStatement;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.ObjectUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
+import java.util.stream.Collectors;
@Slf4j
@SuppressFBWarnings(value = {"RCN_REDUNDANT_NULLCHECK_WOULD_HAVE_BEEN_A_NPE", "REC_CATCH_EXCEPTION"},
@@ -45,24 +45,22 @@ public Response handlerExecute(String content)
List types = new ArrayList<>();
List