diff --git a/.github/workflows/bofore_checker.yml b/.github/workflows/bofore_checker.yml index 8b2e268d40..6225acf1a9 100644 --- a/.github/workflows/bofore_checker.yml +++ b/.github/workflows/bofore_checker.yml @@ -123,6 +123,7 @@ jobs: -Dcos.access="$COS_ACCESS" -Dcos.secret="$COS_SECRET" -Dcos.bucket="$COS_BUCKET" -Dcos.endpoint="$COS_ENDPOINT" \ -Ds3.access="$S3_ACCESS" -Ds3.secret="$S3_SECRET" -Ds3.bucket="$S3_BUCKET" -Ds3.endpoint="$S3_ENDPOINT" + before_checker_package: runs-on: ubuntu-latest needs: diff --git a/.github/workflows/publish-docker.yml b/.github/workflows/publish-docker.yml index 9068d2fd1a..5bf457cdaa 100644 --- a/.github/workflows/publish-docker.yml +++ b/.github/workflows/publish-docker.yml @@ -36,6 +36,7 @@ jobs: chmod 755 ./mvnw ./mvnw clean install package -Dspotbugs.skip -Dgpg.skip -Dcheckstyle.skip -DskipTests=true -q + - name: Cache binary id: cache-binary uses: actions/cache@v3 @@ -44,7 +45,7 @@ jobs: with: path: | ./Dockerfile - ./dist/datacap-server-${{ needs.before-compile.outputs.APP_VERSION }}-bin.tar.gz + ./dist/datacap-server-${{ steps.apply_version.outputs.APP_VERSION }}-bin.tar.gz key: ${{ runner.os }}-build-${{ env.cache-name }} restore-keys: ${{ runner.os }}-build-${{ env.cache-name }} diff --git a/.gitignore b/.gitignore index bc41b9c43f..32849a44ac 100644 --- a/.gitignore +++ b/.gitignore @@ -62,6 +62,10 @@ depcheck-output.json parser/datacap-parser-mysql/src/main/java parser/datacap-parser-trino/src/main/java/*.tokens parser/datacap-parser-trino/src/main/java/io/edurt/datacap/parser/antlr +core/datacap-parser/src/main/java/*.tokens +core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/*.java +core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/*.interp +!core/datacap-parser/src/main/java/io/edurt/datacap/sql/node # Skip convert # convert/datacap-convert-txt/*.txt diff --git a/client/datacap-cli/pom.xml b/client/datacap-cli/pom.xml index 8e4270ec0d..eea2dff8c0 100644 --- a/client/datacap-cli/pom.xml +++ b/client/datacap-cli/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/configure/etc/bin/install-plugin.sh b/configure/etc/bin/install-plugin.sh index 5d7c639015..0e50b68e9b 100644 --- a/configure/etc/bin/install-plugin.sh +++ b/configure/etc/bin/install-plugin.sh @@ -1,7 +1,7 @@ #!/bin/sh HOME=$(pwd) -VERSION=2024.4.0 +VERSION=2024.4.1-SNAPSHOT CDN_CENTER="https://repo1.maven.org/maven2/io/edurt/datacap" install_package() { diff --git a/configure/etc/conf/application.properties b/configure/etc/conf/application.properties index 730082c139..e143a29af4 100644 --- a/configure/etc/conf/application.properties +++ b/configure/etc/conf/application.properties @@ -1,5 +1,5 @@ ################################### Banner configure ################################# -app.version=2024.4.0 +app.version=2024.4.1-SNAPSHOT ################################### Basic configure ################################# server.port=9096 diff --git a/configure/metadata.json b/configure/metadata.json index 2864eaab4d..106d382a30 100644 --- a/configure/metadata.json +++ b/configure/metadata.json @@ -10,15 +10,15 @@ "description": "A powerful MySQL integration plugin that provides comprehensive database connectivity. Features include table/view management, data import/export, SQL query execution, stored procedure support, and real-time data synchronization. 
Optimized for high-performance data operations with connection pooling and batch processing capabilities.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/mysql.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "8.0", "5.7" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-mysql/2024.4.0/datacap-plugin-mysql-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-mysql/2024.4.1-SNAPSHOT/datacap-plugin-mysql-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-clickhouse", @@ -26,14 +26,14 @@ "description": "ClickHouse is a distributed column-oriented database. It is a distributed database with a focus on performance, scalability, and ease of use.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/clickhouse.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-clickhouse/2024.4.0/datacap-plugin-clickhouse-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-clickhouse/2024.4.1-SNAPSHOT/datacap-plugin-clickhouse-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-influxdb", @@ -41,14 +41,14 @@ "description": "InfluxDB is a time series database that stores and retrieves data points. It is a distributed database with a focus on performance, scalability, and ease of use.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/influxdb.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-influxdb/2024.4.0/datacap-plugin-influxdb-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-influxdb/2024.4.1-SNAPSHOT/datacap-plugin-influxdb-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-solr", @@ -56,14 +56,14 @@ "description": "Solr is highly reliable, scalable and fault tolerant, providing distributed indexing, replication and load-balanced querying, automated failover and recovery, centralized configuration and more. 
Solr powers the search and navigation features of many of the world's largest internet sites.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/solr.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-solr/2024.4.0/datacap-plugin-solr-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-solr/2024.4.1-SNAPSHOT/datacap-plugin-solr-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-cratedb", @@ -71,14 +71,14 @@ "description": "CrateDB is a hyper-fast database for real-time analytics and hybrid search. It handles multiple types of data and combines the simplicity of SQL with the scalability of a distributed architecture.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/cratedb.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-cratedb/2024.4.0/datacap-plugin-cratedb-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-cratedb/2024.4.1-SNAPSHOT/datacap-plugin-cratedb-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-cratedb-http", @@ -86,14 +86,14 @@ "description": "Use HTTP to access the remote CrateDB database", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/cratedbhttp.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-cratedb-http/2024.4.0/datacap-plugin-cratedbhttp-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-cratedb-http/2024.4.1-SNAPSHOT/datacap-plugin-cratedb-http-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-zookeeper", @@ -101,14 +101,14 @@ "description": "Zookeeper is a distributed configuration service. 
It is a distributed database with a focus on performance, scalability, and ease of use.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/zookeeper.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-zookeeper/2024.4.0/datacap-plugin-zookeeper-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-zookeeper/2024.4.1-SNAPSHOT/datacap-plugin-zookeeper-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-kafka", @@ -116,14 +116,14 @@ "description": "Apache Kafka is an open-source distributed event streaming platform used by thousands of companies for high-performance data pipelines, streaming analytics, data integration, and mission-critical applications.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/kafka.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-kafka/2024.4.0/datacap-plugin-kafka-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-kafka/2024.4.1-SNAPSHOT/datacap-plugin-kafka-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-redis", @@ -131,14 +131,14 @@ "description": "Redis is an in-memory data store used by millions of developers as a cache, vector database, document database, streaming engine, and message broker. Redis has built-in replication and different levels of on-disk persistence. 
It supports complex data types (for example, strings, hashes, lists, sets, sorted sets, and JSON), with atomic operations defined on those data types.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/redis.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-redis/2024.4.0/datacap-plugin-redis-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-redis/2024.4.1-SNAPSHOT/datacap-plugin-redis-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-h2", @@ -146,14 +146,14 @@ "description": "H2 is a small, fast, and easy-to-use in-memory database.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/h2.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-h2/2024.4.0/datacap-plugin-h2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-h2/2024.4.1-SNAPSHOT/datacap-plugin-h2-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-db2", @@ -161,14 +161,14 @@ "description": "IBM® Db2® is the cloud-native database built to power low-latency transactions, real-time analytics and AI applications at scale.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/db2.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-db2/2024.4.0/datacap-plugin-db2-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-db2/2024.4.1-SNAPSHOT/datacap-plugin-db2-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-dm", @@ -176,14 +176,14 @@ "description": "A professional database software developer and service provider, we provide users with technically advanced and detailed products.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/dm.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-dm/2024.4.0/datacap-plugin-dm-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-dm/2024.4.1-SNAPSHOT/datacap-plugin-dm-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-hdfs", @@ -191,14 +191,14 @@ "description": "Hadoop Distributed File System (HDFS) is a distributed file system based on the Hadoop project.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/hdfs.svg", - "released": "2024-12-02 
16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-hdfs/2024.4.0/datacap-plugin-hdfs-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-hdfs/2024.4.1-SNAPSHOT/datacap-plugin-hdfs-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-alioss", @@ -206,14 +206,14 @@ "description": "Ali OSS is a cloud storage service that provides a simple, secure and efficient way to store and manage data.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/alioss.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-alioss/2024.4.0/datacap-plugin-alioss-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-alioss/2024.4.1-SNAPSHOT/datacap-plugin-alioss-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-dremio", @@ -221,14 +221,14 @@ "description": "Dremio is a free, open source, distributed data platform.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/dremio.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-dremio/2024.4.0/datacap-plugin-dremio-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-dremio/2024.4.1-SNAPSHOT/datacap-plugin-dremio-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-druid", @@ -236,14 +236,14 @@ "description": "A high performance, real-time analytics database that delivers sub-second queries on streaming and batch data at scale and under load.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/druid.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-druid/2024.4.0/datacap-plugin-druid-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-druid/2024.4.1-SNAPSHOT/datacap-plugin-druid-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-duckdb", @@ -251,14 +251,14 @@ "description": "To use DuckDB, you must first create a connection to a database. 
The exact syntax varies between the client APIs but it typically involves passing an argument to configure persistence.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/duckdb.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-duckdb/2024.4.0/datacap-plugin-duckdb-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-duckdb/2024.4.1-SNAPSHOT/datacap-plugin-duckdb-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-pinot", @@ -266,14 +266,14 @@ "description": "Apache Pinot is a real-time distributed OLAP datastore, built to deliver scalable real-time analytics with low latency. It can ingest from batch data sources (such as Hadoop HDFS, Amazon S3, Azure ADLS, Google Cloud Storage) as well as stream data sources (such as Apache Kafka).", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/pinot.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-pinot/2024.4.0/datacap-plugin-pinot-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-pinot/2024.4.1-SNAPSHOT/datacap-plugin-pinot-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-cassandra", @@ -281,14 +281,14 @@ "description": "Cassandra is a distributed key-value store for the cloud.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/cassandra.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-cassandra/2024.4.0/datacap-plugin-cassandra-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-cassandra/2024.4.1-SNAPSHOT/datacap-plugin-cassandra-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-matrixone", @@ -296,14 +296,14 @@ "description": "MatrixOne is a hyper-converged cloud & edge native distributed database with a structure that separates storage, computation, and transactions to form a consolidated HSTAP data engine. This engine enables a single database system to accommodate diverse business loads such as OLTP, OLAP, and stream computing. 
It also supports deployment and utilization across public, private, and edge clouds, ensuring compatibility with diverse infrastructures.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/matrixone.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-matrixone/2024.4.0/datacap-plugin-matrixone-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-matrixone/2024.4.1-SNAPSHOT/datacap-plugin-matrixone-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-scylladb", @@ -311,14 +311,14 @@ "description": "ScyllaDB is a distributed database written in C++ and designed to fully exploit modern cloud infrastructure. It utilizes a shard-per-core architecture, meaning each CPU core has dedicated resources that independently handle data for maximum efficiency. ", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/scylladb.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-scylladb/2024.4.0/datacap-plugin-scylladb-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-scylladb/2024.4.1-SNAPSHOT/datacap-plugin-scylladb-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-paradedb", @@ -326,14 +326,14 @@ "description": "ParadeDB is a modern Elasticsearch alternative built on Postgres. 
Built for real-time, update-heavy workloads.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/paradedb.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-paradedb/2024.4.0/datacap-plugin-paradedb-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-paradedb/2024.4.1-SNAPSHOT/datacap-plugin-paradedb-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-hive-2x", @@ -341,14 +341,14 @@ "description": "The Apache Hive ™ is a distributed, fault-tolerant data warehouse system that enables analytics at a massive scale and facilitates reading, writing, and managing petabytes of data residing in distributed storage using SQL.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/hive2x.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "2.x" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-hive-2x/2024.4.0/datacap-plugin-hive-2x-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-hive-2x/2024.4.1-SNAPSHOT/datacap-plugin-hive-2x-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-kyuubi", @@ -356,14 +356,14 @@ "description": "Apache Kyuubi, a distributed and multi-tenant gateway to provide serverless SQL on lakehouses.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/kyuubi.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-kyuubi/2024.4.0/datacap-plugin-kyuubi-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-kyuubi/2024.4.1-SNAPSHOT/datacap-plugin-kyuubi-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-ignite", @@ -371,14 +371,14 @@ "description": "Apache Ignite is a distributed database for high-performance computing with in-memory speed.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/ignite.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-ignite/2024.4.0/datacap-plugin-ignite-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-ignite/2024.4.1-SNAPSHOT/datacap-plugin-ignite-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-impala", @@ -386,14 +386,14 @@ "description": "Apache Impala is the open source, native analytic database for open data and table formats.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": 
"http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/impala.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-impala/2024.4.0/datacap-plugin-impala-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-impala/2024.4.1-SNAPSHOT/datacap-plugin-impala-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-kylin", @@ -401,14 +401,14 @@ "description": "Apache Kylin is a leading open source OLAP engine for Big Data capable for sub-second query latency on trillions of records. Since being created and open sourced by eBay in 2014, and graduated to Top Level Project of Apache Software Foundation in 2015. Kylin has quickly been adopted by thousands of organizations world widely as their critical analytics application for Big Data.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/kylin.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-kylin/2024.4.0/datacap-plugin-kylin-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-kylin/2024.4.1-SNAPSHOT/datacap-plugin-kylin-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-timescale", @@ -416,14 +416,14 @@ "description": "TimescaleDB is an open-source database designed to make SQL scalable for time-series data. It is engineered up from PostgreSQL and packaged as a PostgreSQL extension, providing automatic partitioning across time and space (partitioning key), as well as full SQL support.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/timescale.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-timescale/2024.4.0/datacap-plugin-timescale-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-timescale/2024.4.1-SNAPSHOT/datacap-plugin-timescale-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-iotdb", @@ -431,14 +431,14 @@ "description": "Apache IoTDB (Database for Internet of Things) is an IoT native database with high performance for data management and analysis, deployable on the edge and the cloud. 
", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/iotdb.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-iotdb/2024.4.0/datacap-plugin-iotdb-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-iotdb/2024.4.1-SNAPSHOT/datacap-plugin-iotdb-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-monetdb", @@ -446,14 +446,14 @@ "description": "MonetDB is a high performance relational database system for analytics.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/monetdb.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-monetdb/2024.4.0/datacap-plugin-monetdb-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-monetdb/2024.4.1-SNAPSHOT/datacap-plugin-monetdb-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-ydb", @@ -461,14 +461,14 @@ "description": "YDB is a versatile open source Distributed SQL Database that combines high availability and scalability with strong consistency and ACID transactions. It accommodates transactional (OLTP), analytical (OLAP), and streaming workloads simultaneously.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/ydb.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-ydb/2024.4.0/datacap-plugin-ydb-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-ydb/2024.4.1-SNAPSHOT/datacap-plugin-ydb-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-neo4j", @@ -476,14 +476,14 @@ "description": "Neo4j is a graph database for knowledge management and analysis.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/neo4j.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-neo4j/2024.4.0/datacap-plugin-neo4j-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-neo4j/2024.4.1-SNAPSHOT/datacap-plugin-neo4j-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-oceanbase", @@ -491,14 +491,14 @@ "description": "OceanBase is an enterprise distributed relational database with high availability, high performance, horizontal scalability, and compatibility with SQL standards.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": 
"http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/oceanbase.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-oceanbase/2024.4.0/datacap-plugin-oceanbase-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-oceanbase/2024.4.1-SNAPSHOT/datacap-plugin-oceanbase-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-oracle", @@ -506,14 +506,14 @@ "description": "Oracle is an advanced, enterprise-class relational database management system.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/oracle.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-oracle/2024.4.0/datacap-plugin-oracle-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-oracle/2024.4.1-SNAPSHOT/datacap-plugin-oracle-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-phoenix", @@ -521,14 +521,14 @@ "description": "Apache Phoenix is a SQL skin over HBase delivered as a client-embedded JDBC driver targeting low latency queries over HBase data. ", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/phoenix.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-phoenix/2024.4.0/datacap-plugin-phoenix-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-phoenix/2024.4.1-SNAPSHOT/datacap-plugin-phoenix-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-postgresql", @@ -536,14 +536,14 @@ "description": "PostgreSQL is a powerful, open source object-relational database system.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/postgresql.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-postgresql/2024.4.0/datacap-plugin-postgresql-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-postgresql/2024.4.1-SNAPSHOT/datacap-plugin-postgresql-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-presto", @@ -551,14 +551,14 @@ "description": "Presto is a distributed SQL query engine for big data.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/presto.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-presto/2024.4.0/datacap-plugin-presto-bin.tar.gz" + "url": 
"https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-presto/2024.4.1-SNAPSHOT/datacap-plugin-presto-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-snowflake", @@ -566,14 +566,14 @@ "description": "Snowflake is a database for analytics.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/snowflake.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-snowflake/2024.4.0/datacap-plugin-snowflake-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-snowflake/2024.4.1-SNAPSHOT/datacap-plugin-snowflake-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-sqlserver", @@ -581,14 +581,14 @@ "description": "Microsoft SQL Server is a relational database management system.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/sqlserver.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-sqlserver/2024.4.0/datacap-plugin-sqlserver-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-sqlserver/2024.4.1-SNAPSHOT/datacap-plugin-sqlserver-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-tdengine", @@ -596,14 +596,14 @@ "description": "TDengine is an open source, high-performance, cloud native time-series database optimized for Internet of Things (IoT), Connected Cars, and Industrial IoT.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/tdengine.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-tdengine/2024.4.0/datacap-plugin-tdengine-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-tdengine/2024.4.1-SNAPSHOT/datacap-plugin-tdengine-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-trino", @@ -611,14 +611,14 @@ "description": "Trino is a distributed SQL query engine for big data.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/trino.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-trino/2024.4.0/datacap-plugin-trino-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-trino/2024.4.1-SNAPSHOT/datacap-plugin-trino-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-doris", @@ -626,14 +626,14 @@ "description": "Apache Doris is a distributed OLAP database.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": 
"http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/doris.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-doris/2024.4.0/datacap-plugin-doris-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-doris/2024.4.1-SNAPSHOT/datacap-plugin-doris-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-starrocks", @@ -641,14 +641,14 @@ "description": "StarRocks is a distributed OLAP database.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/starrocks.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-starrocks/2024.4.0/datacap-plugin-starrocks-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-starrocks/2024.4.1-SNAPSHOT/datacap-plugin-starrocks-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-hologres", @@ -656,14 +656,14 @@ "description": "Hologres is a unified real-time data warehousing service developed by Alibaba Cloud. You can use Hologres to write, update, process, and analyze large amounts of data in real time.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/hologres.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-hologres/2024.4.0/datacap-plugin-hologres-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-hologres/2024.4.1-SNAPSHOT/datacap-plugin-hologres-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-greptimedb", @@ -671,14 +671,14 @@ "description": "GreptimeDB is an open-source unified & cost-effective time-series database for Metrics, Logs, and Events (also Traces in plan). You can gain real-time insights from Edge to Cloud at Any Scale.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/greptimedb.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-greptimedb/2024.4.0/datacap-plugin-greptimedb-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-greptimedb/2024.4.1-SNAPSHOT/datacap-plugin-greptimedb-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-questdb", @@ -686,14 +686,14 @@ "description": "QuestDB is the world's fastest growing open-source time-series database. It offers massive ingestion throughput, millisecond queries, powerful time-series SQL extensions, and scales well with minimal and maximal hardware. 
Save costs with better performance and efficiency.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/questdb.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-questdb/2024.4.0/datacap-plugin-questdb-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-questdb/2024.4.1-SNAPSHOT/datacap-plugin-questdb-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-h2-tcp", @@ -701,14 +701,14 @@ "description": "Use TCP to access the remote H2 database", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/h2tcp.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-h2-tcp/2024.4.0/datacap-plugin-h2-tcp-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-h2-tcp/2024.4.1-SNAPSHOT/datacap-plugin-h2-tcp-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-clickhouse-http", @@ -716,14 +716,14 @@ "description": "Use HTTP to access the remote ClickHouse database", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/clickhousehttp.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-clickhouse-http/2024.4.0/datacap-plugin-clickhouse-http-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-clickhouse-http/2024.4.1-SNAPSHOT/datacap-plugin-clickhouse-http-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-ceresdb", @@ -731,14 +731,14 @@ "description": "CeresDB is a database system designed to allow for the storage and retrieval of semi-structured data, i.e. 
data that conforms to a “top-level schema” where column types are known, but those columns can in turn contain dictionaries or lists.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/ceresdb.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-ceresdb/2024.4.0/datacap-plugin-ceresdb-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-ceresdb/2024.4.1-SNAPSHOT/datacap-plugin-ceresdb-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-redis-jdbc", @@ -746,14 +746,14 @@ "description": "Use JDBC to access Redis", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/redisjdbc.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-redis-jdbc/2024.4.0/datacap-plugin-redis-jdbc-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-redis-jdbc/2024.4.1-SNAPSHOT/datacap-plugin-redis-jdbc-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-elasticsearch-8x", @@ -761,14 +761,14 @@ "description": "Use Elasticsearch 8.x", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/elasticsearch8x.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "8.x" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-elasticsearch-8x/2024.4.0/datacap-plugin-elasticsearch-8x-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-elasticsearch-8x/2024.4.1-SNAPSHOT/datacap-plugin-elasticsearch-8x-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-mongo-atlas", @@ -776,14 +776,14 @@ "description": "The MongoDB Atlas SQL JDBC Driver provides SQL connectivity to MongoDB Atlas for client applications developed in Java.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/mongoatlas.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-mongo-atlas/2024.4.0/datacap-plugin-mongo-atlas-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-mongo-atlas/2024.4.1-SNAPSHOT/datacap-plugin-mongo-atlas-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-plugin-mongo-community", @@ -791,14 +791,14 @@ "description": "The MongoDB Community SQL JDBC Driver provides SQL connectivity to MongoDB Community for client applications developed in Java.", "i18nFormat": true, "type": "Connector", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": 
"http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/plugin/mongocommunity.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-mongo-community/2024.4.0/datacap-plugin-mongo-community-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-plugin-mongo-community/2024.4.1-SNAPSHOT/datacap-plugin-mongo-community-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-convert-csv", @@ -806,14 +806,14 @@ "description": "A CSV file conversion plugin for DataCap.", "i18nFormat": true, "type": "Converter", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/convert/csv.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-csv/2024.4.0/convert/datacap-convert-csv-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-csv/2024.4.1-SNAPSHOT/datacap-convert-csv-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-convert-txt", @@ -821,14 +821,14 @@ "description": "A TXT file conversion plugin for DataCap.", "i18nFormat": true, "type": "Converter", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/convert/txt.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-txt/2024.4.0/convert/datacap-convert-txt-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-txt/2024.4.1-SNAPSHOT/datacap-convert-txt-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-convert-json", @@ -836,14 +836,14 @@ "description": "A JSON file conversion plugin for DataCap.", "i18nFormat": true, "type": "Converter", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/convert/json.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-json/2024.4.0/convert/datacap-convert-json-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-json/2024.4.1-SNAPSHOT/datacap-convert-json-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-convert-xml", @@ -851,14 +851,14 @@ "description": "A XML file conversion plugin for DataCap.", "i18nFormat": true, "type": "Converter", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/convert/xml.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-xml/2024.4.0/convert/datacap-convert-xml-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-xml/2024.4.1-SNAPSHOT/datacap-convert-xml-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-convert-none", @@ -866,14 +866,14 @@ "description": "A 
multidimensional array file conversion plugin for DataCap.", "i18nFormat": true, "type": "Converter", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/convert/none.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-none/2024.4.0/convert/datacap-convert-none-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-convert-none/2024.4.1-SNAPSHOT/datacap-convert-none-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-executor-local", @@ -881,14 +881,14 @@ "description": "A local execution plugin for DataCap.", "i18nFormat": true, "type": "Executor", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/executor/local.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-executor-local/2024.4.0/executor/datacap-executor-local-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-executor-local/2024.4.1-SNAPSHOT/datacap-executor-local-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-executor-seatunnel", @@ -896,14 +896,14 @@ "description": "A Seatunnel execution plugin for DataCap.", "i18nFormat": true, "type": "Executor", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/executor/seatunnel.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-executor-seatunnel/2024.4.0/executor/datacap-executor-seatunnel-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-executor-seatunnel/2024.4.1-SNAPSHOT/datacap-executor-seatunnel-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-scheduler-local", @@ -911,14 +911,14 @@ "description": "A local scheduling plugin for DataCap.", "i18nFormat": true, "type": "Scheduler", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/scheduler/local.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-scheduler-local/2024.4.0/scheduler/datacap-scheduler-local-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-scheduler-local/2024.4.1-SNAPSHOT/datacap-scheduler-local-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-fs-local", @@ -926,14 +926,14 @@ "description": "A local file system plugin for DataCap.", "i18nFormat": true, "type": "FileSystem", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/fs/local.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-local/2024.4.0/fs/datacap-fs-local-bin.tar.gz" + "url": 
"https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-local/2024.4.1-SNAPSHOT/datacap-fs-local-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-fs-qiniu", @@ -941,14 +941,14 @@ "description": "A Qiniu file system plugin for DataCap.", "i18nFormat": true, "type": "FileSystem", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/fs/qiniu.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-qiniu/2024.4.0/fs/datacap-fs-qiniu-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-qiniu/2024.4.1-SNAPSHOT/datacap-fs-qiniu-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-fs-alioss", @@ -956,14 +956,14 @@ "description": "A Ali OSS file system plugin for DataCap.", "i18nFormat": true, "type": "FileSystem", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/fs/alioss.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-alioss/2024.4.0/fs/datacap-fs-alioss-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-alioss/2024.4.1-SNAPSHOT/datacap-fs-alioss-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-fs-tencent-cos", @@ -971,14 +971,14 @@ "description": "A Tencent COS file system plugin for DataCap.", "i18nFormat": true, "type": "FileSystem", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/fs/tencent-cos.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-tencent-cos/2024.4.0/fs/datacap-fs-tencent-cos-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-tencent-cos/2024.4.1-SNAPSHOT/datacap-fs-tencent-cos-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-fs-amazon-s3", @@ -986,14 +986,14 @@ "description": "A Amazon S3 file system plugin for DataCap.", "i18nFormat": true, "type": "FileSystem", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/fs/amazon-s3.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-amazon-s3/2024.4.0/fs/datacap-fs-amazon-s3-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-amazon-s3/2024.4.1-SNAPSHOT/datacap-fs-amazon-s3-2024.4.1-SNAPSHOT-bin.tar.gz" }, { "key": "datacap-fs-minio", @@ -1001,14 +1001,14 @@ "description": "A Minio file system plugin for DataCap.", "i18nFormat": true, "type": "FileSystem", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "author": "datacap-community", "logo": "http://devlive-cdn.oss-cn-beijing.aliyuncs.com/applications/datacap/resources/logo/fs/minio.svg", - "released": "2024-12-02 16:45:07", + "released": "2024-12-02 18:54:48", "supportVersion": [ "ALL" ], - "url": 
"https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-minio/2024.4.0/fs/datacap-fs-minio-bin.tar.gz" + "url": "https://repo1.maven.org/maven2/io/edurt/datacap/datacap-fs-minio/2024.4.1-SNAPSHOT/datacap-fs-minio-2024.4.1-SNAPSHOT-bin.tar.gz" } ] } \ No newline at end of file diff --git a/configure/publish/publish-docker.sh b/configure/publish/publish-docker.sh new file mode 100644 index 0000000000..ad29330f0a --- /dev/null +++ b/configure/publish/publish-docker.sh @@ -0,0 +1,80 @@ +#!/bin/sh + +# Check if Docker is installed and running +# 检查 Docker 是否已安装并正在运行 +if ! command -v docker &> /dev/null; then + echo "Error: Docker is not installed" + echo "错误:Docker 未安装" + exit 1 +fi + +# Check if Docker daemon is running +# 检查 Docker 守护进程是否运行 +if ! docker info &> /dev/null; then + echo "Error: Docker daemon is not running" + echo "错误:Docker 守护进程未运行" + exit 1 +fi + +# Check if Docker buildx is available +# 检查 Docker buildx 是否可用 +if ! docker buildx version &> /dev/null; then + echo "Error: Docker buildx is not available" + echo "错误:Docker buildx 不可用" + exit 1 +fi + +# Get Maven project version +# 获取 Maven 项目版本号 +APP_VERSION=`./mvnw -Dexec.executable='echo' -Dexec.args='${project.version}' --non-recursive exec:exec -Dorg.slf4j.simpleLogger.defaultLogLevel=WARN -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.help=INFO | tail -1` + +# Check if version was successfully retrieved +# 检查版本号是否成功获取 +if [ -z "$APP_VERSION" ]; then + echo "Error: Failed to get application version" + echo "错误:获取应用版本失败" + exit 1 +fi + +# Print the version that will be used +# 打印将要使用的版本号 +echo "Building version: ${APP_VERSION}" +echo "构建版本:${APP_VERSION}" + +# Create and use a new builder instance +# 创建并使用新的构建器实例 +echo "Setting up Docker buildx..." +echo "设置 Docker buildx..." +docker buildx create --use + +# Check if Dockerfile exists +# 检查 Dockerfile 是否存在 +if [ ! -f "Dockerfile" ]; then + echo "Error: Dockerfile not found" + echo "错误:未找到 Dockerfile" + exit 1 +fi + +# Build multi-architecture images and push to Docker Hub +# - Platform: linux/amd64 (Intel/AMD) and linux/arm64 (ARM) +# - Tags: version specific and latest +# 构建多架构镜像并推送到 Docker Hub +# - 平台: linux/amd64 (Intel/AMD) 和 linux/arm64 (ARM) +# - 标签: 特定版本和最新版 +echo "Building and pushing Docker images..." +echo "构建并推送 Docker 镜像..." +if docker buildx build --platform linux/amd64,linux/arm64 \ + -t devliveorg/datacap:${APP_VERSION} \ + -t devliveorg/datacap:latest \ + . --push; then + # Print success message + # 打印成功信息 + echo "Successfully built and pushed images:" + echo "成功构建并推送以下镜像:" + echo "- devliveorg/datacap:${APP_VERSION}" + echo "- devliveorg/datacap:latest" +else + echo "Error: Failed to build and push Docker images" + echo "错误:构建并推送 Docker 镜像失败" + exit 1 +fi \ No newline at end of file diff --git a/configure/publish/publish-newVersion.sh b/configure/publish/publish-newVersion.sh index f4ac95fdda..82fa81afc7 100644 --- a/configure/publish/publish-newVersion.sh +++ b/configure/publish/publish-newVersion.sh @@ -42,8 +42,16 @@ job_runner_apply() { echo "Apply new version for metadata ..." 
# Update version and URL in metadata.json using perl + # 更新 version 字段 perl -i -pe 's/"version": "[^"]*"/"version": "'"$VERSION"'"/' "$HOME/configure/metadata.json" - perl -i -pe 's|/plugins/[^/]+/|/plugins/'"$VERSION"'/|g' "$HOME/configure/metadata.json" + + # 更新 url 字段中的版本号 (针对类似 2024.4.0 这样的版本格式) + # Update the version number in the url field (for a version format like 2024.4.0) + perl -i -pe 's/\/\d{4}\.\d+\.\d+\//\/'"$VERSION"'\//' "$HOME/configure/metadata.json" + perl -i -pe 's/-\d{4}\.\d+\.\d+-bin/-'"$VERSION"'-bin/' "$HOME/configure/metadata.json" + + # 更新发布日期 + # Update publish date perl -i -pe 's/"released": "[^"]*"/"released": "'"$CURRENT_DATE"'"/' "$HOME/configure/metadata.json" printf "Apply new version for web ...\n" diff --git a/convert/datacap-convert-csv/pom.xml b/convert/datacap-convert-csv/pom.xml index 5df3b33a7b..82fb7c4bec 100644 --- a/convert/datacap-convert-csv/pom.xml +++ b/convert/datacap-convert-csv/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/convert/datacap-convert-json/pom.xml b/convert/datacap-convert-json/pom.xml index 217f84adeb..e2fc99a390 100644 --- a/convert/datacap-convert-json/pom.xml +++ b/convert/datacap-convert-json/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/convert/datacap-convert-none/pom.xml b/convert/datacap-convert-none/pom.xml index 3bc772ca95..649c46bf60 100644 --- a/convert/datacap-convert-none/pom.xml +++ b/convert/datacap-convert-none/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/convert/datacap-convert-spi/pom.xml b/convert/datacap-convert-spi/pom.xml index 667adfd1ec..2f14463e4c 100644 --- a/convert/datacap-convert-spi/pom.xml +++ b/convert/datacap-convert-spi/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/convert/datacap-convert-txt/pom.xml b/convert/datacap-convert-txt/pom.xml index 11dd9e66ca..bfaa03bc9c 100644 --- a/convert/datacap-convert-txt/pom.xml +++ b/convert/datacap-convert-txt/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/convert/datacap-convert-xml/pom.xml b/convert/datacap-convert-xml/pom.xml index 201aa23928..24d64984f3 100644 --- a/convert/datacap-convert-xml/pom.xml +++ b/convert/datacap-convert-xml/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/core/datacap-captcha/pom.xml b/core/datacap-captcha/pom.xml index a52f4b99ee..a275a96055 100644 --- a/core/datacap-captcha/pom.xml +++ b/core/datacap-captcha/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/core/datacap-common/pom.xml b/core/datacap-common/pom.xml index 55222e67cc..9f3747687d 100644 --- a/core/datacap-common/pom.xml +++ b/core/datacap-common/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/core/datacap-parser/pom.xml b/core/datacap-parser/pom.xml index 936a6e720d..0a67635853 100644 --- a/core/datacap-parser/pom.xml +++ b/core/datacap-parser/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/core/datacap-parser/src/main/antlr4/io/edurt/datacap/sql/parser/SqlBase.g4 b/core/datacap-parser/src/main/antlr4/io/edurt/datacap/sql/parser/SqlBase.g4 index 0fdec7ccb1..15369a7eb0 100644 --- 
a/core/datacap-parser/src/main/antlr4/io/edurt/datacap/sql/parser/SqlBase.g4 +++ b/core/datacap-parser/src/main/antlr4/io/edurt/datacap/sql/parser/SqlBase.g4 @@ -1,68 +1,617 @@ grammar SqlBase; -singleStatement:(statement)*; +singleStatement: (statement)* EOF; -SHOW: [Ss][Hh][Oo][Ww]; -PATHS: [Pp][Aa][Tt][Hh][Ss]; -TOPICS: 'TOPICS'; -CONSUMERS: 'CONSUMERS'; -DATABASES: [Dd][Aa][Tt][Aa][Bb][Aa][Ss][Es][Ss]; -TABLES: [Tt][Aa][Bb][Ll][Ee][Ss]; +// Keywords SELECT: [Ss][Ee][Ll][Ee][Cc][Tt]; FROM: [Ff][Rr][Oo][Mm]; +WHERE: [Ww][Hh][Ee][Rr][Ee]; +ORDER: [Oo][Rr][Dd][Ee][Rr]; +BY: [Bb][Yy]; +GROUP: [Gg][Rr][Oo][Uu][Pp]; +HAVING: [Hh][Aa][Vv][Ii][Nn][Gg]; +LIMIT: [Ll][Ii][Mm][Ii][Tt]; +OFFSET: [Oo][Ff][Ff][Ss][Ee][Tt]; +ASC: [Aa][Ss][Cc]; +DESC: [Dd][Ee][Ss][Cc]; +AS: [Aa][Ss]; +ON: [Oo][Nn]; +USING: [Uu][Ss][Ii][Nn][Gg]; +INNER: [Ii][Nn][Nn][Ee][Rr]; +LEFT: [Ll][Ee][Ff][Tt]; +RIGHT: [Rr][Ii][Gg][Hh][Tt]; +FULL: [Ff][Uu][Ll][Ll]; +OUTER: [Oo][Uu][Tt][Ee][Rr]; +JOIN: [Jj][Oo][Ii][Nn]; +CROSS: [Cc][Rr][Oo][Ss][Ss]; +NATURAL: [Nn][Aa][Tt][Uu][Rr][Aa][Ll]; +INSERT: [Ii][Nn][Ss][Ee][Rr][Tt]; +INTO: [Ii][Nn][Tt][Oo]; +VALUES: [Vv][Aa][Ll][Uu][Ee][Ss]; +UPDATE: [Uu][Pp][Dd][Aa][Tt][Ee]; +SET: [Ss][Ee][Tt]; +DELETE: [Dd][Ee][Ll][Ee][Tt][Ee]; +CREATE: [Cc][Rr][Ee][Aa][Tt][Ee]; +ALTER: [Aa][Ll][Tt][Ee][Rr]; +DROP: [Dd][Rr][Oo][Pp]; +TABLE: [Tt][Aa][Bb][Ll][Ee]; +VIEW: [Vv][Ii][Ee][Ww]; +INDEX: [Ii][Nn][Dd][Ee][Xx]; +PRIMARY: [Pp][Rr][Ii][Mm][Aa][Rr][Yy]; +KEY: [Kk][Ee][Yy]; +FOREIGN: [Ff][Oo][Rr][Ee][Ii][Gg][Nn]; +REFERENCES: [Rr][Ee][Ff][Ee][Rr][Ee][Nn][Cc][Ee][Ss]; +CONSTRAINT: [Cc][Oo][Nn][Ss][Tt][Rr][Aa][Ii][Nn][Tt]; +DEFAULT: [Dd][Ee][Ff][Aa][Uu][Ll][Tt]; +UNIQUE: [Uu][Nn][Ii][Qq][Uu][Ee]; +CHECK: [Cc][Hh][Ee][Cc][Kk]; +COLUMN: [Cc][Oo][Ll][Uu][Mm][Nn]; +DATABASE: [Dd][Aa][Tt][Aa][Bb][Aa][Ss][Ee]; +USE: [Uu][Ss][Ee]; +IF: [Ii][Ff]; +EXISTS: [Ee][Xx][Ii][Ss][Tt][Ss]; +REPLACE: [Rr][Ee][Pp][Ll][Aa][Cc][Ee]; +TEMP: [Tt][Ee][Mm][Pp]; +TEMPORARY: [Tt][Ee][Mm][Pp][Oo][Rr][Aa][Rr][Yy]; +SHOW: [Ss][Hh][Oo][Ww]; +DATABASES: [Dd][Aa][Tt][Aa][Bb][Aa][Ss][Ee][Ss]; +TABLES: [Tt][Aa][Bb][Ll][Ee][Ss]; +COLUMNS: [Cc][Oo][Ll][Uu][Mm][Nn][Ss]; +CREATE_TIME: [Cc][Rr][Ee][Aa][Tt][Ee]'_'[Tt][Ii][Mm][Ee]; +UPDATE_TIME: [Uu][Pp][Dd][Aa][Tt][Ee]'_'[Tt][Ii][Mm][Ee]; + +// Operators +AND: [Aa][Nn][Dd]; +OR: [Oo][Rr]; +NOT: [Nn][Oo][Tt]; +IS: [Ii][Ss]; +NULL: [Nn][Uu][Ll][Ll]; +LIKE: [Ll][Ii][Kk][Ee]; +IN: [Ii][Nn]; +BETWEEN: [Bb][Ee][Tt][Ww][Ee][Ee][Nn]; +CASE: [Cc][Aa][Ss][Ee]; +WHEN: [Ww][Hh][Ee][Nn]; +THEN: [Tt][Hh][Ee][Nn]; +ELSE: [Ee][Ll][Ss][Ee]; +END: [Ee][Nn][Dd]; + +// Others +CHARSET: [Cc][Hh][Aa][Rr][Ss][Ee][Tt]; +COLLATE: [Cc][Oo][Ll][Ll][Aa][Tt][Ee]; +AUTO_INCREMENT: [Aa][Uu][Tt][Oo]'_'[Ii][Nn][Cc][Rr][Ee][Mm][Ee][Nn][Tt]; +COMMENT: [Cc][Oo][Mm][Mm][Ee][Nn][Tt]; +ADD: [Aa][Dd][Dd]; +MODIFY: [Mm][Oo][Dd][Ii][Ff][Yy]; +ENGINE: [Ee][Nn][Gg][Ii][Nn][Ee]; +VERSION: [Vv][Ee][Rr][Ss][Ii][Oo][Nn]; + +tableOptions + : tableOption+ + ; + +tableOption + : ENGINE '=' STRING + | CHARSET '=' STRING + | COLLATE '=' STRING + | AUTO_INCREMENT '=' INTEGER_VALUE + | COMMENT '=' STRING + ; statement - : SHOW childPathStatement - | SELECT columnStatement fromClause - | kafkaStatement + : selectStatement + | insertStatement + | updateStatement + | deleteStatement + | createStatement + | alterStatement + | dropStatement + | useStatement + | showStatement + ; + +// USE statement +useStatement + : USE databaseName + ; + +// SELECT statement +selectStatement + : queryExpression + orderByClause? + limitClause? 
+ ; + +queryExpression + : queryTerm + | queryExpression UNION (ALL)? queryTerm + | queryExpression EXCEPT queryTerm + | queryExpression INTERSECT queryTerm + ; + +queryTerm + : queryPrimary + ; + +queryPrimary + : querySpecification + | '(' queryExpression ')' + ; + +querySpecification + : SELECT (ALL | DISTINCT)? selectElements + fromClause? + whereClause? + groupByClause? + havingClause? + ; + +selectElements + : selectElement (',' selectElement)* + ; + +selectElement + : (tableName '.')? (columnName | '*') (AS? alias)? + | expression (AS? alias)? + | caseExpression (AS? alias)? + ; + +caseExpression + : CASE + (WHEN expression THEN expression)+ + (ELSE expression)? + END + ; + +whereClause + : WHERE expression + ; + +groupByClause + : GROUP BY groupByElement (',' groupByElement)* + ; + +groupByElement + : expression + ; + +havingClause + : HAVING expression + ; + +orderByClause + : ORDER BY orderByElement (',' orderByElement)* + ; + +orderByElement + : expression (ASC | DESC)? + ; + +limitClause + : LIMIT INTEGER_VALUE (OFFSET INTEGER_VALUE)? + | LIMIT INTEGER_VALUE ',' INTEGER_VALUE + ; + +// INSERT statement +insertStatement + : INSERT (OR REPLACE)? INTO tableName + ('(' columnName (',' columnName)* ')')? + (VALUES insertValuesConstructor (',' insertValuesConstructor)* + | selectStatement) + ; + +insertValuesConstructor + : '(' value (',' value)* ')' + ; + +// UPDATE statement +updateStatement + : UPDATE tableName + SET updateElement (',' updateElement)* + whereClause? + ; + +updateElement + : columnName '=' expression + ; + +// DELETE statement +deleteStatement + : DELETE FROM tableName + whereClause? + ; + +// CREATE statement +createStatement + : createTableStatement + | createViewStatement + | createIndexStatement + | createDatabaseStatement + ; + +createDatabaseStatement + : CREATE DATABASE (IF NOT EXISTS)? databaseName + ; + +createTableStatement + : CREATE (TEMP | TEMPORARY)? TABLE (IF NOT EXISTS)? tableName + '(' tableElement (',' tableElement)* ')' + tableOptions? + ; + +createViewStatement + : CREATE (OR REPLACE)? VIEW tableName + ('(' columnName (',' columnName)* ')')? + AS selectStatement + ; + +createIndexStatement + : CREATE (UNIQUE)? INDEX indexName + ON tableName '(' indexColumn (',' indexColumn)* ')' + ; + +indexColumn + : columnName (ASC | DESC)? + ; + +tableElement + : columnDefinition + | tableConstraint + ; + +columnDefinition + : columnName dataType columnConstraint* + ; + +columnConstraint + : NOT? NULL + | PRIMARY KEY + | UNIQUE + | DEFAULT defaultValue + | (CONSTRAINT constraintName)? foreignKeyClause + | (CONSTRAINT constraintName)? checkConstraint + ; + +tableConstraint + : (CONSTRAINT constraintName)? + ( primaryKeyConstraint + | uniqueConstraint + | foreignKeyConstraint + | checkConstraint + ) + ; + +primaryKeyConstraint + : PRIMARY KEY '(' columnName (',' columnName)* ')' + ; + +uniqueConstraint + : UNIQUE '(' columnName (',' columnName)* ')' + ; + +foreignKeyConstraint + : FOREIGN KEY '(' columnName (',' columnName)* ')' + foreignKeyClause + ; + +foreignKeyClause + : REFERENCES tableName ('(' columnName (',' columnName)* ')')? + (ON DELETE referenceOption)? + (ON UPDATE referenceOption)? + ; + +referenceOption + : RESTRICT + | CASCADE + | SET NULL + | NO ACTION + | SET DEFAULT + ; + +checkConstraint + : CHECK '(' expression ')' + ; + +// ALTER statement +alterStatement + : alterTableStatement + ; + +alterTableStatement + : ALTER TABLE tableName + alterSpecification (',' alterSpecification)* + ; + +alterSpecification + : ADD COLUMN? 
columnDefinition + | ADD tableConstraint + | DROP COLUMN columnName + | DROP CONSTRAINT constraintName + | MODIFY COLUMN? columnDefinition + | ALTER COLUMN columnName SET DEFAULT expression + | ALTER COLUMN columnName DROP DEFAULT + ; + +// DROP statement +dropStatement + : dropTableStatement + | dropViewStatement + | dropIndexStatement + | dropDatabaseStatement + ; + +dropTableStatement + : DROP TABLE (IF EXISTS)? tableName (',' tableName)* ; -// -------------------- Kafka Statement -------------------- -kafkaQueryTopicStatement - : SHOW TOPICS - | SHOW DATABASES +dropViewStatement + : DROP VIEW (IF EXISTS)? tableName (',' tableName)* ; -kafkaQueryConsumerStatement - : SHOW CONSUMERS - | SHOW CONSUMERS fromClause - | SHOW TABLES - | SHOW TABLES fromClause + +dropIndexStatement + : DROP INDEX indexName ON tableName + ; + +dropDatabaseStatement + : DROP DATABASE (IF EXISTS)? databaseName + ; + +// SHOW statement +showStatement + : showDatabasesStatement + | showTablesStatement + | showColumnsStatement ; -kafkaQueryStatement - : kafkaQueryTopicStatement - | kafkaQueryConsumerStatement + +showDatabasesStatement + : SHOW DATABASES (LIKE STRING)? ; -kafkaStatement - : kafkaQueryStatement + +showTablesStatement + : SHOW TABLES + (FROM | IN)? databaseName? + (LIKE STRING | WHERE expression)? ; -// SHOW PATHS, SHOW PATHS FROM ... -childPathStatement: PATHS | PATHS fromClause; +showColumnsStatement + : SHOW COLUMNS + (FROM | IN) tableName + ((FROM | IN) databaseName)? + (LIKE STRING | WHERE expression)? + ; -columnStatement: identifier; +// FROM clause and JOINs +fromClause + : FROM tableSource (',' tableSource)* + ; -fromClause : FROM tableName; +tableSource + : tablePrimary + | joinedTable + ; -tableName: identifier ('.' identifier)*; -identifier: (IDENTIFIER | STRING | quotedIdentifier)*; -quotedIdentifier: BACKQUOTED_IDENTIFIER; +tablePrimary + : tableName (AS? alias)? + | '(' selectStatement ')' (AS? alias)? + ; + +joinedTable + : tablePrimary joinClause+ + ; + +joinType + : INNER + | LEFT OUTER? + | RIGHT OUTER? + | FULL OUTER? + ; + +joinClause + : joinTypeClause tablePrimary joinCondition + | NATURAL (INNER | LEFT | RIGHT | FULL)? JOIN tablePrimary + | CROSS JOIN tablePrimary + ; + +joinTypeClause + : INNER JOIN + | LEFT OUTER? JOIN + | RIGHT OUTER? JOIN + | FULL OUTER? JOIN + | JOIN + ; + +joinCondition + : ON expression + | USING '(' columnName (',' columnName)* ')' + ; + +// Expressions +expression + : '(' expression ')' #ParenExpression + | primary #PrimaryExpression + | expression comparisonOperator expression #ComparisonExpression + | expression AND expression #AndExpression + | expression OR expression #OrExpression + | expression NOT? BETWEEN expression AND expression #BetweenExpression + | expression NOT? IN ('(' expression (',' expression)* ')') #InExpression + | expression NOT? LIKE expression #LikeExpression + | expression IS NOT? NULL #IsNullExpression + | expression IS NOT?
(TRUE | FALSE) #IsBooleanExpression + | NOT expression #NotExpression + | expression '+' expression #AddExpression + | expression '-' expression #SubtractExpression + | expression '*' expression #MultiplyExpression + | expression '/' expression #DivideExpression + ; + + primary + : literal #LiteralPrimary + | columnReference #ColumnReferencePrimary + | functionCall #FunctionCallPrimary + ; + + literal + : STRING + | INTEGER_VALUE + | DECIMAL_VALUE + | TRUE + | FALSE + | NULL + ; + + comparisonOperator + : '=' | '>' | '<' | '>=' | '<=' | '<>' | '!=' | '<=>' + ; + +expressionList + : '(' expression (',' expression)* ')' + | selectStatement + ; + +columnReference + : (tableName '.')? columnName + ; + +functionCall + : functionName '(' (DISTINCT? expression (',' expression)*)? ')' + | CAST '(' expression AS dataType ')' + | EXTRACT '(' identifier FROM expression ')' + | VERSION '(' ')' + ; + +// Common elements +value: expression; + +defaultValue + : literal + | '(' expression ')' + ; + +// Names and Identifiers +columnName: identifier; +tableName: (databaseName '.')? identifier; +databaseName: identifier; +indexName: identifier; +constraintName: identifier; +alias: identifier; +functionName: identifier; + +identifier + : IDENTIFIER + | quotedIdentifier + | nonReservedWord + ; + +quotedIdentifier + : BACKQUOTED_IDENTIFIER + ; + +// Data Types +dataType + : baseDataType ('(' INTEGER_VALUE (',' INTEGER_VALUE)* ')')? + ; + +baseDataType + : CHARACTER | VARCHAR | BINARY | VARBINARY + | TINYINT | SMALLINT | INTEGER | INT | BIGINT + | FLOAT | REAL | DOUBLE | DECIMAL | NUMERIC + | DATE | TIME | TIMESTAMP | DATETIME + | BOOLEAN | BOOL + | BLOB | TEXT + | JSON | XML + ; + +// Additional Keywords +UNION: [Uu][Nn][Ii][Oo][Nn]; +ALL: [Aa][Ll][Ll]; +DISTINCT: [Dd][Ii][Ss][Tt][Ii][Nn][Cc][Tt]; +EXCEPT: [Ee][Xx][Cc][Ee][Pp][Tt]; +INTERSECT: [Ii][Nn][Tt][Ee][Rr][Ss][Ee][Cc][Tt]; +CHARACTER: [Cc][Hh][Aa][Rr][Aa][Cc][Tt][Ee][Rr]; +VARCHAR: [Vv][Aa][Rr][Cc][Hh][Aa][Rr]; +BINARY: [Bb][Ii][Nn][Aa][Rr][Yy]; +VARBINARY: [Vv][Aa][Rr][Bb][Ii][Nn][Aa][Rr][Yy]; +TINYINT: [Tt][Ii][Nn][Yy][Ii][Nn][Tt]; +SMALLINT: [Ss][Mm][Aa][Ll][Ll][Ii][Nn][Tt]; +INTEGER: [Ii][Nn][Tt][Ee][Gg][Ee][Rr]; +INT: [Ii][Nn][Tt]; +BIGINT: [Bb][Ii][Gg][Ii][Nn][Tt]; +FLOAT: [Ff][Ll][Oo][Aa][Tt]; +REAL: [Rr][Ee][Aa][Ll]; +DOUBLE: [Dd][Oo][Uu][Bb][Ll][Ee]; +DECIMAL: [Dd][Ee][Cc][Ii][Mm][Aa][Ll]; +NUMERIC: [Nn][Uu][Mm][Ee][Rr][Ii][Cc]; +DATE: [Dd][Aa][Tt][Ee]; +TIME: [Tt][Ii][Mm][Ee]; +TIMESTAMP: [Tt][Ii][Mm][Ee][Ss][Tt][Aa][Mm][Pp]; +DATETIME: [Dd][Aa][Tt][Ee][Tt][Ii][Mm][Ee]; +BOOLEAN: [Bb][Oo][Oo][Ll][Ee][Aa][Nn]; +BOOL: [Bb][Oo][Oo][Ll]; +BLOB: [Bb][Ll][Oo][Bb]; +TEXT: [Tt][Ee][Xx][Tt]; +JSON: [Jj][Ss][Oo][Nn]; +XML: [Xx][Mm][Ll]; +CURRENT_TIMESTAMP: [Cc][Uu][Rr][Rr][Ee][Nn][Tt]'_'[Tt][Ii][Mm][Ee][Ss][Tt][Aa][Mm][Pp]; +CAST: [Cc][Aa][Ss][Tt]; +EXTRACT: [Ee][Xx][Tt][Rr][Aa][Cc][Tt]; +RESTRICT: [Rr][Ee][Ss][Tt][Rr][Ii][Cc][Tt]; +CASCADE: [Cc][Aa][Ss][Cc][Aa][Dd][Ee]; +NO: [Nn][Oo]; +ACTION: [Aa][Cc][Tt][Ii][Oo][Nn]; + +// Non-reserved words that can be used as identifiers +nonReservedWord + : TEMP | TEMPORARY | REPLACE | EXISTS | IF + | CONSTRAINT | COLUMN | DATABASE | INDEX + | RESTRICT | CASCADE | NO | ACTION + | CHARACTER | VARCHAR | BINARY | VARBINARY + | TINYINT | SMALLINT | INTEGER | INT | BIGINT + | FLOAT | REAL | DOUBLE | DECIMAL | NUMERIC + | DATE | TIME | TIMESTAMP | DATETIME + | BOOLEAN | BOOL | BLOB | TEXT | JSON | XML + | CHARSET | COLLATE | AUTO_INCREMENT | COMMENT + | ADD | MODIFY | ENGINE + | DATABASES | TABLES | COLUMNS + | 
CREATE_TIME | UPDATE_TIME + | VERSION + ; + +// Lexer rules +fragment DIGIT: [0-9]; +fragment LETTER: [a-zA-Z]; +TRUE: [Tt][Rr][Uu][Ee]; +FALSE: [Ff][Aa][Ll][Ss][Ee]; + +INTEGER_VALUE + : DIGIT+ + ; + +DECIMAL_VALUE + : DIGIT+ '.' DIGIT* + | '.' DIGIT+ + ; -fragment DIGIT:[0-9]; -fragment LETTER:[a-zA-Z]; STRING : '\'' ( ~('\''|'\\') | ('\\' .) )* '\'' | '"' ( ~('"'|'\\') | ('\\' .) )* '"' - | '*' ; + IDENTIFIER - : (LETTER | DIGIT | '_')+ + : (LETTER | '_') (LETTER | DIGIT | '_')* ; BACKQUOTED_IDENTIFIER : '`' ( ~'`' | '``' )* '`' ; -SIMPLE_COMMENT: '--' ~[\r\n]* '\r'? '\n'? -> channel(HIDDEN); -BRACKETED_EMPTY_COMMENT: '/**/' -> channel(HIDDEN); -BRACKETED_COMMENT : '/*' ~[+] .*? '*/' -> channel(HIDDEN); -WS: [ \r\n\t]+ -> channel(HIDDEN); +// Comments and whitespace +SIMPLE_COMMENT + : '--' ~[\r\n]* '\r'? '\n'? -> channel(HIDDEN) + ; + +BRACKETED_COMMENT + : '/*' .*? '*/' -> channel(HIDDEN) + ; + +WS + : [ \r\n\t]+ -> channel(HIDDEN) + ; \ No newline at end of file diff --git a/core/datacap-parser/src/main/java/SqlBase.tokens b/core/datacap-parser/src/main/java/SqlBase.tokens deleted file mode 100644 index 0b5d6c9e8f..0000000000 --- a/core/datacap-parser/src/main/java/SqlBase.tokens +++ /dev/null @@ -1,20 +0,0 @@ -T__0=1 -SHOW=2 -PATHS=3 -TOPICS=4 -CONSUMERS=5 -DATABASES=6 -TABLES=7 -SELECT=8 -FROM=9 -STRING=10 -IDENTIFIER=11 -BACKQUOTED_IDENTIFIER=12 -SIMPLE_COMMENT=13 -BRACKETED_EMPTY_COMMENT=14 -BRACKETED_COMMENT=15 -WS=16 -'.'=1 -'TOPICS'=4 -'CONSUMERS'=5 -'/**/'=14 diff --git a/core/datacap-parser/src/main/java/SqlBaseLexer.tokens b/core/datacap-parser/src/main/java/SqlBaseLexer.tokens deleted file mode 100644 index 0b5d6c9e8f..0000000000 --- a/core/datacap-parser/src/main/java/SqlBaseLexer.tokens +++ /dev/null @@ -1,20 +0,0 @@ -T__0=1 -SHOW=2 -PATHS=3 -TOPICS=4 -CONSUMERS=5 -DATABASES=6 -TABLES=7 -SELECT=8 -FROM=9 -STRING=10 -IDENTIFIER=11 -BACKQUOTED_IDENTIFIER=12 -SIMPLE_COMMENT=13 -BRACKETED_EMPTY_COMMENT=14 -BRACKETED_COMMENT=15 -WS=16 -'.'=1 -'TOPICS'=4 -'CONSUMERS'=5 -'/**/'=14 diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLParseException.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLParseException.java new file mode 100644 index 0000000000..eaec3275e9 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLParseException.java @@ -0,0 +1,15 @@ +package io.edurt.datacap.sql; + +public class SQLParseException + extends RuntimeException +{ + public SQLParseException(String message) + { + super(message); + } + + public SQLParseException(String message, Throwable cause) + { + super(message, cause); + } +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLParser.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLParser.java new file mode 100644 index 0000000000..08a7123b6f --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLParser.java @@ -0,0 +1,39 @@ +package io.edurt.datacap.sql; + +import io.edurt.datacap.sql.parser.SqlBaseLexer; +import io.edurt.datacap.sql.parser.SqlBaseParser; +import io.edurt.datacap.sql.statement.SQLStatement; +import org.antlr.v4.runtime.CharStreams; +import org.antlr.v4.runtime.CommonTokenStream; +import org.antlr.v4.runtime.tree.ParseTree; + +public class SQLParser +{ + public static SQLStatement parse(String sql) + { + try { + // 创建词法分析器和语法分析器 + // Create lexer and parser instance + SqlBaseLexer lexer = new SqlBaseLexer(CharStreams.fromString(sql)); + CommonTokenStream tokens = new CommonTokenStream(lexer); + SqlBaseParser 
parser = new SqlBaseParser(tokens); + + // 使用自定义错误监听器 + // Use custom error listener + parser.removeErrorListeners(); + parser.addErrorListener(new SQLParserErrorListener()); + + // 获取解析树 + // Get parse tree + ParseTree tree = parser.singleStatement(); + + // 访问解析树 + // Visit parse tree + SQLVisitor visitor = new SQLVisitor(); + return visitor.visit(tree); + } + catch (Exception e) { + throw new SQLParseException("Failed to parse SQL: " + e.getMessage(), e); + } + } +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/UnderLineListener.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLParserErrorListener.java similarity index 63% rename from core/datacap-parser/src/main/java/io/edurt/datacap/sql/UnderLineListener.java rename to core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLParserErrorListener.java index cb0135bebf..5b44b6ca18 100644 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/UnderLineListener.java +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLParserErrorListener.java @@ -1,19 +1,20 @@ package io.edurt.datacap.sql; -import com.google.common.base.Preconditions; import org.antlr.v4.runtime.BaseErrorListener; import org.antlr.v4.runtime.RecognitionException; import org.antlr.v4.runtime.Recognizer; -public class UnderLineListener +public class SQLParserErrorListener extends BaseErrorListener { + @Override public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, - int line, int charPositionInLine, + int line, + int charPositionInLine, String msg, RecognitionException e) { - Preconditions.checkState(false, "line " + line + ":" + charPositionInLine + " " + msg); + throw new SQLParseException(String.format("line %d:%d %s", line, charPositionInLine, msg)); } } diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLVisitor.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLVisitor.java new file mode 100644 index 0000000000..a21a5cc672 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SQLVisitor.java @@ -0,0 +1,400 @@ +package io.edurt.datacap.sql; + +import io.edurt.datacap.sql.node.Expression; +import io.edurt.datacap.sql.node.clause.JoinClause; +import io.edurt.datacap.sql.node.clause.LimitClause; +import io.edurt.datacap.sql.node.element.OrderByElement; +import io.edurt.datacap.sql.node.element.SelectElement; +import io.edurt.datacap.sql.node.element.TableElement; +import io.edurt.datacap.sql.parser.SqlBaseBaseVisitor; +import io.edurt.datacap.sql.parser.SqlBaseParser; +import io.edurt.datacap.sql.processor.ExpressionProcessor; +import io.edurt.datacap.sql.processor.ShowProcessor; +import io.edurt.datacap.sql.statement.SQLStatement; +import io.edurt.datacap.sql.statement.SelectStatement; + +import java.util.ArrayList; +import java.util.List; + +public class SQLVisitor + extends SqlBaseBaseVisitor<SQLStatement> +{ + @Override + public SQLStatement visitSingleStatement(SqlBaseParser.SingleStatementContext ctx) + { + return visit(ctx.statement(0)); + } + + @Override + public SQLStatement visitStatement(SqlBaseParser.StatementContext ctx) + { + if (ctx.selectStatement() != null) { + return visitSelectStatement(ctx.selectStatement()); + } + else if (ctx.insertStatement() != null) { + return visitInsertStatement(ctx.insertStatement()); + } + else if (ctx.updateStatement() != null) { + return visitUpdateStatement(ctx.updateStatement()); + } + else if (ctx.deleteStatement() != null) { + return visitDeleteStatement(ctx.deleteStatement()); + } + else if
(ctx.createStatement() != null) { + return visitCreateStatement(ctx.createStatement()); + } + else if (ctx.alterStatement() != null) { + return visitAlterStatement(ctx.alterStatement()); + } + else if (ctx.dropStatement() != null) { + return visitDropStatement(ctx.dropStatement()); + } + else if (ctx.useStatement() != null) { + return visitUseStatement(ctx.useStatement()); + } + else if (ctx.showStatement() != null) { + return visitShowStatement(ctx.showStatement()); + } + return null; + } + + @Override + public SQLStatement visitSelectStatement(SqlBaseParser.SelectStatementContext ctx) + { + SelectStatement statement = new SelectStatement(); + + // Parse SELECT elements + if (ctx.queryExpression().queryTerm().queryPrimary().querySpecification() != null) { + SqlBaseParser.QuerySpecificationContext querySpec = + ctx.queryExpression().queryTerm().queryPrimary().querySpecification(); + + statement.setSelectElements(processSelectElements(querySpec.selectElements())); + + // Parse FROM clause + if (querySpec.fromClause() != null) { + statement.setFromSources(processFromClause(querySpec.fromClause())); + } + + // Parse WHERE clause + if (querySpec.whereClause() != null) { + statement.setWhereClause(processExpression(querySpec.whereClause().expression())); + } + + // Parse GROUP BY clause + if (querySpec.groupByClause() != null) { + statement.setGroupByElements(visitGroupByElements(querySpec.groupByClause())); + } + + // Parse HAVING clause + if (querySpec.havingClause() != null) { + statement.setHavingClause(processExpression(querySpec.havingClause().expression())); + } + } + + // Parse ORDER BY clause + if (ctx.orderByClause() != null) { + statement.setOrderByElements(visitOrderByElements(ctx.orderByClause())); + } + + // Parse LIMIT clause + if (ctx.limitClause() != null) { + statement.setLimitClause(processLimitClause(ctx.limitClause())); + } + + return statement; + } + + @Override + public SQLStatement visitQueryExpression(SqlBaseParser.QueryExpressionContext ctx) + { + return visit(ctx.queryTerm()); + } + + @Override + public SQLStatement visitQueryTerm(SqlBaseParser.QueryTermContext ctx) + { + return visit(ctx.queryPrimary()); + } + + @Override + public SQLStatement visitQueryPrimary(SqlBaseParser.QueryPrimaryContext ctx) + { + if (ctx.querySpecification() != null) { + return visit(ctx.querySpecification()); + } + else if (ctx.queryExpression() != null) { + return visit(ctx.queryExpression()); + } + return null; + } + + @Override + public SQLStatement visitInsertStatement(SqlBaseParser.InsertStatementContext ctx) + { + // TODO: Implement insert statement parsing + return null; + } + + @Override + public SQLStatement visitUpdateStatement(SqlBaseParser.UpdateStatementContext ctx) + { + // TODO: Implement update statement parsing + return null; + } + + @Override + public SQLStatement visitDeleteStatement(SqlBaseParser.DeleteStatementContext ctx) + { + // TODO: Implement delete statement parsing + return null; + } + + @Override + public SQLStatement visitCreateStatement(SqlBaseParser.CreateStatementContext ctx) + { + // TODO: Implement create statement parsing + return null; + } + + @Override + public SQLStatement visitAlterStatement(SqlBaseParser.AlterStatementContext ctx) + { + // TODO: Implement alter statement parsing + return null; + } + + @Override + public SQLStatement visitDropStatement(SqlBaseParser.DropStatementContext ctx) + { + // TODO: Implement drop statement parsing + return null; + } + + @Override + public SQLStatement visitUseStatement(SqlBaseParser.UseStatementContext ctx) + 
{ + // TODO: Implement use statement parsing + return null; + } + + @Override + public SQLStatement visitShowStatement(SqlBaseParser.ShowStatementContext ctx) + { + ShowProcessor processor = new ShowProcessor(); + return processor.process(ctx); + } + + @Override + public SQLStatement visitQuerySpecification(SqlBaseParser.QuerySpecificationContext ctx) + { + SelectStatement statement = new SelectStatement(); + statement.setSelectElements(processSelectElements(ctx.selectElements())); + + if (ctx.fromClause() != null) { + statement.setFromSources(processFromClause(ctx.fromClause())); + } + + if (ctx.whereClause() != null) { + statement.setWhereClause(processExpression(ctx.whereClause().expression())); + } + + if (ctx.groupByClause() != null) { + statement.setGroupByElements(visitGroupByElements(ctx.groupByClause())); + } + + if (ctx.havingClause() != null) { + statement.setHavingClause(processExpression(ctx.havingClause().expression())); + } + + return statement; + } + + private List<SelectElement> processSelectElements(SqlBaseParser.SelectElementsContext ctx) + { + List<SelectElement> elements = new ArrayList<>(); + + for (SqlBaseParser.SelectElementContext elementCtx : ctx.selectElement()) { + SelectElement element = new SelectElement(); + + if (elementCtx.columnName() != null) { + // 直接指定的列名 + // Directly specified column names + element.setColumn(elementCtx.columnName().getText()); + } + // 处理表达式 + // Handle expression + if (elementCtx.expression() != null) { + Expression expr = processExpression(elementCtx.expression()); + element.setExpression(expr); + + // 处理函数调用的情况 + // Handle function call + if (expr.getType() == Expression.ExpressionType.FUNCTION) { + // 尝试从函数的参数中获取列名 + // Try to get column name from function parameters + if (expr.getChildren() != null && !expr.getChildren().isEmpty()) { + Expression columnExpr = expr.getChildren().get(0); + if (columnExpr.getType() == Expression.ExpressionType.COLUMN_REFERENCE) { + element.setColumn(columnExpr.getValue().toString()); + } + } + } + } + // 处理别名 + // Handle alias + if (elementCtx.alias() != null) { + element.setAlias(elementCtx.alias().getText()); + } + + elements.add(element); + } + + return elements; + } + + private List<TableElement> processFromClause(SqlBaseParser.FromClauseContext ctx) + { + List<TableElement> tables = new ArrayList<>(); + + for (SqlBaseParser.TableSourceContext sourceCtx : ctx.tableSource()) { + TableElement table = new TableElement(); + + // 普通获取主表信息 + // Get the primary table information + if (sourceCtx.tablePrimary() != null) { + SqlBaseParser.TablePrimaryContext primaryCtx = sourceCtx.tablePrimary(); + + // 处理子查询 + // Handle subquery + if (primaryCtx.selectStatement() != null) { + // 处理子查询的别名 + // Handle the alias of the subquery + if (primaryCtx.alias() != null) { + table.setAlias(primaryCtx.alias().getText()); + } + // 可以选择存储子查询的SelectStatement + // Optionally keep the subquery's SelectStatement + SelectStatement subquery = (SelectStatement) visit(primaryCtx.selectStatement()); + table.setSubquery(subquery); + } + // 处理普通表 + // Handle normal tables + else if (primaryCtx.tableName() != null) { + table.setTableName(primaryCtx.tableName().getText()); + if (primaryCtx.alias() != null) { + table.setAlias(primaryCtx.alias().getText()); + } + } + } + + if (sourceCtx.joinedTable() != null) { + List<JoinClause> joins = new ArrayList<>(); + + // 从 joinedTable 的 tablePrimary 中获取主表信息 + // Get the primary table information from the tablePrimary of joinedTable + SqlBaseParser.TablePrimaryContext primaryCtx = sourceCtx.joinedTable().tablePrimary(); + if (primaryCtx != null && primaryCtx.tableName() != null) {
table.setTableName(primaryCtx.tableName().getText()); + if (primaryCtx.alias() != null) { + table.setAlias(primaryCtx.alias().getText()); + } + } + + // 处理 joins + // Handle joins + for (SqlBaseParser.JoinClauseContext joinCtx : sourceCtx.joinedTable().joinClause()) { + JoinClause join = processJoinClause(joinCtx); + joins.add(join); + } + table.setJoins(joins); + } + + tables.add(table); + } + + return tables; + } + + private JoinClause processJoinClause(SqlBaseParser.JoinClauseContext ctx) + { + JoinClause join = new JoinClause(); + + if (ctx.joinTypeClause() != null) { + if (ctx.joinTypeClause().INNER() != null) { + join.setJoinType(JoinClause.JoinType.INNER); + } + else if (ctx.joinTypeClause().LEFT() != null) { + join.setJoinType(JoinClause.JoinType.LEFT); + } + else if (ctx.joinTypeClause().RIGHT() != null) { + join.setJoinType(JoinClause.JoinType.RIGHT); + } + else if (ctx.joinTypeClause().FULL() != null) { + join.setJoinType(JoinClause.JoinType.FULL); + } + } + + TableElement rightTable = new TableElement(); + rightTable.setTableName(ctx.tablePrimary().tableName().getText()); + if (ctx.tablePrimary().alias() != null) { + rightTable.setAlias(ctx.tablePrimary().alias().getText()); + } + join.setRightTable(rightTable); + + if (ctx.joinCondition() != null) { + if (ctx.joinCondition().ON() != null) { + join.setCondition(processExpression(ctx.joinCondition().expression())); + } + } + + return join; + } + + private Expression processExpression(SqlBaseParser.ExpressionContext ctx) + { + ExpressionProcessor processor = new ExpressionProcessor(); + return processor.visit(ctx); + } + + private List<Expression> visitGroupByElements(SqlBaseParser.GroupByClauseContext ctx) + { + List<Expression> groupByElements = new ArrayList<>(); + + for (SqlBaseParser.GroupByElementContext elementCtx : ctx.groupByElement()) { + groupByElements.add(processExpression(elementCtx.expression())); + } + + return groupByElements; + } + + private List<OrderByElement> visitOrderByElements(SqlBaseParser.OrderByClauseContext ctx) + { + List<OrderByElement> orderByElements = new ArrayList<>(); + + for (SqlBaseParser.OrderByElementContext elementCtx : ctx.orderByElement()) { + OrderByElement element = new OrderByElement(); + element.setExpression(processExpression(elementCtx.expression())); + element.setAscending(elementCtx.DESC() == null); + orderByElements.add(element); + } + + return orderByElements; + } + + private LimitClause processLimitClause(SqlBaseParser.LimitClauseContext ctx) + { + LimitClause limit = new LimitClause(); + + if (ctx.OFFSET() != null) { + // LIMIT row_count OFFSET offset + limit.setLimit(Long.parseLong(ctx.INTEGER_VALUE(0).getText())); + limit.setOffset(Long.parseLong(ctx.INTEGER_VALUE(1).getText())); + } + else if (ctx.INTEGER_VALUE().size() > 1) { + // MySQL-style LIMIT offset, row_count + limit.setOffset(Long.parseLong(ctx.INTEGER_VALUE(0).getText())); + limit.setLimit(Long.parseLong(ctx.INTEGER_VALUE(1).getText())); + } + else { + limit.setLimit(Long.parseLong(ctx.INTEGER_VALUE(0).getText())); + } + + return limit; + } +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlBase.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlBase.java deleted file mode 100644 index 2a6d026916..0000000000 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlBase.java +++ /dev/null @@ -1,25 +0,0 @@ -package io.edurt.datacap.sql; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; -import lombok.ToString; - -import java.util.ArrayList; -import java.util.List; - -@Data -@ToString -@NoArgsConstructor -@AllArgsConstructor -@SuppressFBWarnings(value = {"EI_EXPOSE_REP",
"EI_EXPOSE_REPR", "EI_EXPOSE_REP2"}) -public class SqlBase -{ - private String token; - private String childToken; - private List columns = new ArrayList<>(); - private String table; - private boolean successful = false; - private String message; -} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlBaseFormatter.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlBaseFormatter.java deleted file mode 100644 index a45ab4908d..0000000000 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlBaseFormatter.java +++ /dev/null @@ -1,61 +0,0 @@ -package io.edurt.datacap.sql; - -import com.google.common.base.Preconditions; -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import io.edurt.datacap.sql.parser.SqlBaseLexer; -import io.edurt.datacap.sql.parser.SqlBaseParser; -import lombok.Getter; -import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.CharStreams; -import org.antlr.v4.runtime.CommonTokenStream; -import org.antlr.v4.runtime.atn.PredictionMode; -import org.antlr.v4.runtime.tree.ParseTree; -import org.apache.commons.lang3.ObjectUtils; - -@SuppressFBWarnings(value = {"EI_EXPOSE_REP"}) -public class SqlBaseFormatter -{ - private final String sql; - - @Getter - private SqlBase parseResult; - - public SqlBaseFormatter(String sql) - { - this.sql = sql; - this.parseResult = new SqlBase(); - this.initialize(); - } - - private SqlBase initialize() - { - CharStream stream = CharStreams.fromString(this.sql); - SqlBaseLexer lexer = new SqlBaseLexer(new SqlCaseInsensitiveStream(stream)); - lexer.removeErrorListeners(); - lexer.addErrorListener(new UnderLineListener()); - CommonTokenStream tokens = new CommonTokenStream(lexer); - SqlBaseParser parser = new SqlBaseParser(tokens); - parser.removeErrorListeners(); - parser.addErrorListener(new UnderLineListener()); - parser.getInterpreter().setPredictionMode(PredictionMode.SLL); - - ParseTree tree; - try { - tree = parser.singleStatement(); - } - catch (Exception ex) { - this.parseResult.setMessage(ex.getMessage()); - this.parseResult.setSuccessful(false); - return this.parseResult; - } - - SqlBaseVisitor sqlBaseVisitor = new SqlBaseVisitor(); - if (ObjectUtils.isNotEmpty(tree)) { - this.parseResult = sqlBaseVisitor.visit(tree); - } - else { - Preconditions.checkArgument(false, "Not support sql"); - } - return this.parseResult; - } -} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlBaseToken.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlBaseToken.java deleted file mode 100644 index 297c65e6e8..0000000000 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlBaseToken.java +++ /dev/null @@ -1,11 +0,0 @@ -package io.edurt.datacap.sql; - -public enum SqlBaseToken -{ - SHOW, - PATHS, - SELECT, - // ------ Kafka ------ - TOPICS, - CONSUMERS, -} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlBaseVisitor.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlBaseVisitor.java deleted file mode 100644 index a5361369c0..0000000000 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlBaseVisitor.java +++ /dev/null @@ -1,136 +0,0 @@ -package io.edurt.datacap.sql; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import io.edurt.datacap.sql.parser.SqlBaseParser; -import org.antlr.v4.runtime.tree.ErrorNode; -import org.antlr.v4.runtime.tree.ParseTree; -import org.antlr.v4.runtime.tree.ParseTreeVisitor; -import org.antlr.v4.runtime.tree.RuleNode; -import org.antlr.v4.runtime.tree.TerminalNode; 
-import org.apache.commons.lang3.exception.ExceptionUtils; - -import java.util.Arrays; - -@SuppressFBWarnings(value = {"EI_EXPOSE_REP"}) -public class SqlBaseVisitor - implements ParseTreeVisitor -{ - private SqlBase configure; - - public SqlBaseVisitor() - { - configure = new SqlBase(); - } - - @Override - public SqlBase visit(ParseTree parseTree) - { - int childCount = parseTree.getChildCount(); - for (int i = 0; i < childCount; i++) { - ParseTree child = parseTree.getChild(i); - if (child instanceof SqlBaseParser.StatementContext) { - SqlBaseParser.StatementContext statementContext = (SqlBaseParser.StatementContext) child; - this.handlerWithStatement(statementContext); - } - } - return this.configure; - } - - @Override - public SqlBase visitChildren(RuleNode ruleNode) - { - return this.configure; - } - - @Override - public SqlBase visitTerminal(TerminalNode terminalNode) - { - return this.configure; - } - - @Override - public SqlBase visitErrorNode(ErrorNode errorNode) - { - this.configure.setSuccessful(false); - this.configure.setMessage(errorNode.getText()); - return configure; - } - - private void handlerWithStatement(SqlBaseParser.StatementContext statementContext) - { - int childCount = statementContext.getChildCount(); - int i = 0; - for (; i < childCount; i++) { - ParseTree child = statementContext.getChild(i); - /* Kafka Statement */ - if (child instanceof SqlBaseParser.KafkaStatementContext) { - this.handlerWithKafkaStatement((SqlBaseParser.KafkaStatementContext) child); - } - else if (child instanceof SqlBaseParser.ColumnStatementContext) { - configure.setColumns(Arrays.asList(child.getText())); - } - else if (child instanceof SqlBaseParser.FromClauseContext) { - configure.setTable(child.getChild(1).getText()); - } - else if (child instanceof SqlBaseParser.ChildPathStatementContext) { - this.handlerWithChildPathStatementContext((SqlBaseParser.ChildPathStatementContext) child); - } - else if (child instanceof TerminalNode) { - this.applyToken(child.getText(), false); - } - } - } - - private void handlerWithChildPathStatementContext(SqlBaseParser.ChildPathStatementContext context) - { - int childCount = context.getChildCount(); - int i = 0; - for (; i < childCount; i++) { - ParseTree child = context.getChild(i); - if (child instanceof SqlBaseParser.FromClauseContext) { - this.configure.setTable(child.getChild(1).getText()); - } - else if (child instanceof TerminalNode) { - this.applyToken(child.getText(), true); - } - } - } - - private void handlerWithKafkaStatement(SqlBaseParser.KafkaStatementContext context) - { - ParseTree node = context.getChild(0); - if (node instanceof SqlBaseParser.KafkaQueryStatementContext) { - ParseTree queryNode = node.getChild(0); - if (queryNode instanceof SqlBaseParser.KafkaQueryTopicStatementContext - | queryNode instanceof SqlBaseParser.KafkaQueryConsumerStatementContext) { - int count = queryNode.getChildCount(); - this.applyToken(queryNode.getChild(0).getText(), false); - this.applyToken(queryNode.getChild(1).getText(), true); - // If the total number is greater than 2, the mark specifies the topic, which is the table name - if (count > 2) { - ParseTree fromNode = queryNode.getChild(count - 1); - if (fromNode instanceof SqlBaseParser.FromClauseContext) { - configure.setTable(fromNode.getChild(1).getText()); - } - } - } - } - } - - private void applyToken(String token, boolean isChild) - { - try { - if (isChild) { - this.configure.setChildToken(token.toUpperCase()); - } - else { - this.configure.setToken(token.toUpperCase()); - } - 
configure.setSuccessful(true); - } - catch (Exception exception) { - configure.setSuccessful(false); - configure.setMessage(ExceptionUtils.getMessage(exception)); - } - } -} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlCaseInsensitiveStream.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlCaseInsensitiveStream.java deleted file mode 100644 index 4e8c78ee4f..0000000000 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/SqlCaseInsensitiveStream.java +++ /dev/null @@ -1,78 +0,0 @@ -package io.edurt.datacap.sql; - -import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.IntStream; -import org.antlr.v4.runtime.misc.Interval; - -public class SqlCaseInsensitiveStream - implements CharStream -{ - private final CharStream stream; - - public SqlCaseInsensitiveStream(CharStream stream) - { - this.stream = stream; - } - - @Override - public String getText(Interval interval) - { - return stream.getText(interval); - } - - @Override - public void consume() - { - stream.consume(); - } - - @Override - public int LA(int i) - { - int result = stream.LA(i); - - switch (result) { - case 0: - case IntStream.EOF: - return result; - default: - return Character.toUpperCase(result); - } - } - - @Override - public int mark() - { - return stream.mark(); - } - - @Override - public void release(int marker) - { - stream.release(marker); - } - - @Override - public int index() - { - return stream.index(); - } - - @Override - public void seek(int index) - { - stream.seek(index); - } - - @Override - public int size() - { - return stream.size(); - } - - @Override - public String getSourceName() - { - return stream.getSourceName(); - } -} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/Expression.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/Expression.java new file mode 100644 index 0000000000..8423795392 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/Expression.java @@ -0,0 +1,24 @@ +package io.edurt.datacap.sql.node; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import lombok.Getter; +import lombok.Setter; +import lombok.ToString; + +import java.util.List; + +@Getter +@Setter +@ToString +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) +public class Expression +{ + private ExpressionType type; + private Object value; + private List children; + + public enum ExpressionType + { + LITERAL, COLUMN_REFERENCE, FUNCTION_CALL, BINARY_OP, UNARY_OP, FUNCTION + } +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/clause/JoinClause.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/clause/JoinClause.java new file mode 100644 index 0000000000..897e519db2 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/clause/JoinClause.java @@ -0,0 +1,22 @@ +package io.edurt.datacap.sql.node.clause; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.node.Expression; +import io.edurt.datacap.sql.node.element.TableElement; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) +public class JoinClause +{ + private JoinType joinType; + private TableElement rightTable; + private Expression condition; + + public enum JoinType + { + INNER, LEFT, RIGHT, FULL, CROSS, NATURAL + } +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/clause/LimitClause.java 
b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/clause/LimitClause.java new file mode 100644 index 0000000000..242e37533c --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/clause/LimitClause.java @@ -0,0 +1,14 @@ +package io.edurt.datacap.sql.node.clause; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) +public class LimitClause +{ + private long limit; + private long offset; +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/element/OrderByElement.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/element/OrderByElement.java new file mode 100644 index 0000000000..a97294f439 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/element/OrderByElement.java @@ -0,0 +1,15 @@ +package io.edurt.datacap.sql.node.element; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.node.Expression; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) +public class OrderByElement +{ + private Expression expression; + private boolean ascending; +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/element/SelectElement.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/element/SelectElement.java new file mode 100644 index 0000000000..3ee9c46f87 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/element/SelectElement.java @@ -0,0 +1,16 @@ +package io.edurt.datacap.sql.node.element; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.node.Expression; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) +public class SelectElement +{ + private String column; + private String alias; + private Expression expression; +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/element/TableElement.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/element/TableElement.java new file mode 100644 index 0000000000..ae2e6d1478 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/node/element/TableElement.java @@ -0,0 +1,20 @@ +package io.edurt.datacap.sql.node.element; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.node.clause.JoinClause; +import io.edurt.datacap.sql.statement.SelectStatement; +import lombok.Getter; +import lombok.Setter; + +import java.util.List; + +@Getter +@Setter +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) +public class TableElement +{ + private String tableName; + private String alias; + private List joins; + private SelectStatement subquery; +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBase.interp b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBase.interp deleted file mode 100644 index b46bdc67ef..0000000000 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBase.interp +++ /dev/null @@ -1,55 +0,0 @@ -token literal names: -null -'.' 
-null -null -'TOPICS' -'CONSUMERS' -null -null -null -null -null -null -null -null -'/**/' -null -null - -token symbolic names: -null -null -SHOW -PATHS -TOPICS -CONSUMERS -DATABASES -TABLES -SELECT -FROM -STRING -IDENTIFIER -BACKQUOTED_IDENTIFIER -SIMPLE_COMMENT -BRACKETED_EMPTY_COMMENT -BRACKETED_COMMENT -WS - -rule names: -singleStatement -statement -kafkaQueryTopicStatement -kafkaQueryConsumerStatement -kafkaQueryStatement -kafkaStatement -childPathStatement -columnStatement -fromClause -tableName -identifier -quotedIdentifier - - -atn: -[4, 1, 16, 92, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 1, 0, 5, 0, 26, 8, 0, 10, 0, 12, 0, 29, 9, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 38, 8, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 44, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 56, 8, 3, 1, 4, 1, 4, 3, 4, 60, 8, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 3, 6, 67, 8, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 5, 9, 77, 8, 9, 10, 9, 12, 9, 80, 9, 9, 1, 10, 1, 10, 1, 10, 5, 10, 85, 8, 10, 10, 10, 12, 10, 88, 9, 10, 1, 11, 1, 11, 1, 11, 0, 0, 12, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 0, 0, 92, 0, 27, 1, 0, 0, 0, 2, 37, 1, 0, 0, 0, 4, 43, 1, 0, 0, 0, 6, 55, 1, 0, 0, 0, 8, 59, 1, 0, 0, 0, 10, 61, 1, 0, 0, 0, 12, 66, 1, 0, 0, 0, 14, 68, 1, 0, 0, 0, 16, 70, 1, 0, 0, 0, 18, 73, 1, 0, 0, 0, 20, 86, 1, 0, 0, 0, 22, 89, 1, 0, 0, 0, 24, 26, 3, 2, 1, 0, 25, 24, 1, 0, 0, 0, 26, 29, 1, 0, 0, 0, 27, 25, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 1, 1, 0, 0, 0, 29, 27, 1, 0, 0, 0, 30, 31, 5, 2, 0, 0, 31, 38, 3, 12, 6, 0, 32, 33, 5, 8, 0, 0, 33, 34, 3, 14, 7, 0, 34, 35, 3, 16, 8, 0, 35, 38, 1, 0, 0, 0, 36, 38, 3, 10, 5, 0, 37, 30, 1, 0, 0, 0, 37, 32, 1, 0, 0, 0, 37, 36, 1, 0, 0, 0, 38, 3, 1, 0, 0, 0, 39, 40, 5, 2, 0, 0, 40, 44, 5, 4, 0, 0, 41, 42, 5, 2, 0, 0, 42, 44, 5, 6, 0, 0, 43, 39, 1, 0, 0, 0, 43, 41, 1, 0, 0, 0, 44, 5, 1, 0, 0, 0, 45, 46, 5, 2, 0, 0, 46, 56, 5, 5, 0, 0, 47, 48, 5, 2, 0, 0, 48, 49, 5, 5, 0, 0, 49, 56, 3, 16, 8, 0, 50, 51, 5, 2, 0, 0, 51, 56, 5, 7, 0, 0, 52, 53, 5, 2, 0, 0, 53, 54, 5, 7, 0, 0, 54, 56, 3, 16, 8, 0, 55, 45, 1, 0, 0, 0, 55, 47, 1, 0, 0, 0, 55, 50, 1, 0, 0, 0, 55, 52, 1, 0, 0, 0, 56, 7, 1, 0, 0, 0, 57, 60, 3, 4, 2, 0, 58, 60, 3, 6, 3, 0, 59, 57, 1, 0, 0, 0, 59, 58, 1, 0, 0, 0, 60, 9, 1, 0, 0, 0, 61, 62, 3, 8, 4, 0, 62, 11, 1, 0, 0, 0, 63, 67, 5, 3, 0, 0, 64, 65, 5, 3, 0, 0, 65, 67, 3, 16, 8, 0, 66, 63, 1, 0, 0, 0, 66, 64, 1, 0, 0, 0, 67, 13, 1, 0, 0, 0, 68, 69, 3, 20, 10, 0, 69, 15, 1, 0, 0, 0, 70, 71, 5, 9, 0, 0, 71, 72, 3, 18, 9, 0, 72, 17, 1, 0, 0, 0, 73, 78, 3, 20, 10, 0, 74, 75, 5, 1, 0, 0, 75, 77, 3, 20, 10, 0, 76, 74, 1, 0, 0, 0, 77, 80, 1, 0, 0, 0, 78, 76, 1, 0, 0, 0, 78, 79, 1, 0, 0, 0, 79, 19, 1, 0, 0, 0, 80, 78, 1, 0, 0, 0, 81, 85, 5, 11, 0, 0, 82, 85, 5, 10, 0, 0, 83, 85, 3, 22, 11, 0, 84, 81, 1, 0, 0, 0, 84, 82, 1, 0, 0, 0, 84, 83, 1, 0, 0, 0, 85, 88, 1, 0, 0, 0, 86, 84, 1, 0, 0, 0, 86, 87, 1, 0, 0, 0, 87, 21, 1, 0, 0, 0, 88, 86, 1, 0, 0, 0, 89, 90, 5, 12, 0, 0, 90, 23, 1, 0, 0, 0, 9, 27, 37, 43, 55, 59, 66, 78, 84, 86] \ No newline at end of file diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseBaseListener.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseBaseListener.java deleted file mode 100644 index 65b505500d..0000000000 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseBaseListener.java +++ /dev/null @@ -1,184 +0,0 @@ -// Generated from 
io/edurt/datacap/sql/parser/SqlBase.g4 by ANTLR 4.12.0 -package io.edurt.datacap.sql.parser; - -import org.antlr.v4.runtime.ParserRuleContext; -import org.antlr.v4.runtime.tree.ErrorNode; -import org.antlr.v4.runtime.tree.TerminalNode; - -/** - * This class provides an empty implementation of {@link SqlBaseListener}, - * which can be extended to create a listener which only needs to handle a subset - * of the available methods. - */ -@SuppressWarnings("CheckReturnValue") -public class SqlBaseBaseListener implements SqlBaseListener { - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void enterSingleStatement(SqlBaseParser.SingleStatementContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void exitSingleStatement(SqlBaseParser.SingleStatementContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void enterStatement(SqlBaseParser.StatementContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void exitStatement(SqlBaseParser.StatementContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void enterKafkaQueryTopicStatement(SqlBaseParser.KafkaQueryTopicStatementContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void exitKafkaQueryTopicStatement(SqlBaseParser.KafkaQueryTopicStatementContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void enterKafkaQueryConsumerStatement(SqlBaseParser.KafkaQueryConsumerStatementContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void exitKafkaQueryConsumerStatement(SqlBaseParser.KafkaQueryConsumerStatementContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void enterKafkaQueryStatement(SqlBaseParser.KafkaQueryStatementContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void exitKafkaQueryStatement(SqlBaseParser.KafkaQueryStatementContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void enterKafkaStatement(SqlBaseParser.KafkaStatementContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void exitKafkaStatement(SqlBaseParser.KafkaStatementContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void enterChildPathStatement(SqlBaseParser.ChildPathStatementContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void exitChildPathStatement(SqlBaseParser.ChildPathStatementContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void enterColumnStatement(SqlBaseParser.ColumnStatementContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void exitColumnStatement(SqlBaseParser.ColumnStatementContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void enterFromClause(SqlBaseParser.FromClauseContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void exitFromClause(SqlBaseParser.FromClauseContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void enterTableName(SqlBaseParser.TableNameContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void exitTableName(SqlBaseParser.TableNameContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void enterIdentifier(SqlBaseParser.IdentifierContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void exitIdentifier(SqlBaseParser.IdentifierContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void enterQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void exitQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx) { } - - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void enterEveryRule(ParserRuleContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void exitEveryRule(ParserRuleContext ctx) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void visitTerminal(TerminalNode node) { } - /** - * {@inheritDoc} - * - *
<p>The default implementation does nothing.</p>
- */ - @Override public void visitErrorNode(ErrorNode node) { } -} \ No newline at end of file diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseBaseVisitor.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseBaseVisitor.java deleted file mode 100644 index 0b54b22aa2..0000000000 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseBaseVisitor.java +++ /dev/null @@ -1,99 +0,0 @@ -// Generated from io/edurt/datacap/sql/parser/SqlBase.g4 by ANTLR 4.12.0 -package io.edurt.datacap.sql.parser; -import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor; - -/** - * This class provides an empty implementation of {@link SqlBaseVisitor}, - * which can be extended to create a visitor which only needs to handle a subset - * of the available methods. - * - * @param The return type of the visit operation. Use {@link Void} for - * operations with no return type. - */ -@SuppressWarnings("CheckReturnValue") -public class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBaseVisitor { - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitSingleStatement(SqlBaseParser.SingleStatementContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitStatement(SqlBaseParser.StatementContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitKafkaQueryTopicStatement(SqlBaseParser.KafkaQueryTopicStatementContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitKafkaQueryConsumerStatement(SqlBaseParser.KafkaQueryConsumerStatementContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitKafkaQueryStatement(SqlBaseParser.KafkaQueryStatementContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitKafkaStatement(SqlBaseParser.KafkaStatementContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitChildPathStatement(SqlBaseParser.ChildPathStatementContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitColumnStatement(SqlBaseParser.ColumnStatementContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitFromClause(SqlBaseParser.FromClauseContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitTableName(SqlBaseParser.TableNameContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitIdentifier(SqlBaseParser.IdentifierContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx) { return visitChildren(ctx); } -} \ No newline at end of file diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseLexer.interp b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseLexer.interp deleted file mode 100644 index d8822ced22..0000000000 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseLexer.interp +++ /dev/null @@ -1,67 +0,0 @@ -token literal names: -null -'.' -null -null -'TOPICS' -'CONSUMERS' -null -null -null -null -null -null -null -null -'/**/' -null -null - -token symbolic names: -null -null -SHOW -PATHS -TOPICS -CONSUMERS -DATABASES -TABLES -SELECT -FROM -STRING -IDENTIFIER -BACKQUOTED_IDENTIFIER -SIMPLE_COMMENT -BRACKETED_EMPTY_COMMENT -BRACKETED_COMMENT -WS - -rule names: -T__0 -SHOW -PATHS -TOPICS -CONSUMERS -DATABASES -TABLES -SELECT -FROM -DIGIT -LETTER -STRING -IDENTIFIER -BACKQUOTED_IDENTIFIER -SIMPLE_COMMENT -BRACKETED_EMPTY_COMMENT -BRACKETED_COMMENT -WS - -channel names: -DEFAULT_TOKEN_CHANNEL -HIDDEN - -mode names: -DEFAULT_MODE - -atn: -[4, 0, 16, 187, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 105, 8, 11, 10, 11, 12, 11, 108, 9, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 115, 8, 11, 10, 11, 12, 11, 118, 9, 11, 1, 11, 1, 11, 3, 11, 122, 8, 11, 1, 12, 1, 12, 1, 12, 4, 12, 127, 8, 12, 11, 12, 12, 12, 128, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 135, 8, 13, 10, 13, 12, 13, 138, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 146, 8, 14, 10, 14, 12, 14, 149, 9, 14, 1, 14, 3, 14, 152, 8, 14, 1, 14, 3, 14, 155, 8, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 171, 8, 16, 10, 16, 12, 16, 174, 9, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 4, 17, 182, 8, 17, 11, 17, 12, 17, 183, 1, 17, 1, 17, 1, 172, 0, 18, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 0, 21, 0, 23, 10, 25, 11, 27, 12, 29, 13, 31, 14, 33, 15, 35, 16, 1, 0, 24, 2, 0, 83, 83, 115, 115, 2, 0, 72, 72, 104, 104, 2, 0, 79, 79, 111, 111, 2, 0, 87, 87, 119, 119, 2, 0, 80, 80, 112, 112, 2, 0, 65, 65, 97, 97, 2, 0, 84, 84, 116, 116, 2, 0, 68, 68, 100, 100, 2, 0, 66, 66, 98, 98, 2, 0, 69, 69, 115, 115, 2, 0, 76, 76, 108, 108, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 70, 70, 102, 102, 2, 0, 82, 82, 114, 114, 2, 0, 77, 77, 109, 109, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 2, 0, 39, 39, 92, 92, 2, 0, 34, 34, 92, 92, 1, 0, 96, 96, 2, 0, 10, 10, 13, 13, 1, 0, 43, 43, 3, 0, 9, 10, 13, 13, 32, 32, 200, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 1, 37, 1, 0, 0, 0, 3, 39, 1, 0, 0, 0, 5, 44, 1, 0, 0, 0, 7, 50, 1, 0, 
0, 0, 9, 57, 1, 0, 0, 0, 11, 67, 1, 0, 0, 0, 13, 77, 1, 0, 0, 0, 15, 84, 1, 0, 0, 0, 17, 91, 1, 0, 0, 0, 19, 96, 1, 0, 0, 0, 21, 98, 1, 0, 0, 0, 23, 121, 1, 0, 0, 0, 25, 126, 1, 0, 0, 0, 27, 130, 1, 0, 0, 0, 29, 141, 1, 0, 0, 0, 31, 158, 1, 0, 0, 0, 33, 165, 1, 0, 0, 0, 35, 181, 1, 0, 0, 0, 37, 38, 5, 46, 0, 0, 38, 2, 1, 0, 0, 0, 39, 40, 7, 0, 0, 0, 40, 41, 7, 1, 0, 0, 41, 42, 7, 2, 0, 0, 42, 43, 7, 3, 0, 0, 43, 4, 1, 0, 0, 0, 44, 45, 7, 4, 0, 0, 45, 46, 7, 5, 0, 0, 46, 47, 7, 6, 0, 0, 47, 48, 7, 1, 0, 0, 48, 49, 7, 0, 0, 0, 49, 6, 1, 0, 0, 0, 50, 51, 5, 84, 0, 0, 51, 52, 5, 79, 0, 0, 52, 53, 5, 80, 0, 0, 53, 54, 5, 73, 0, 0, 54, 55, 5, 67, 0, 0, 55, 56, 5, 83, 0, 0, 56, 8, 1, 0, 0, 0, 57, 58, 5, 67, 0, 0, 58, 59, 5, 79, 0, 0, 59, 60, 5, 78, 0, 0, 60, 61, 5, 83, 0, 0, 61, 62, 5, 85, 0, 0, 62, 63, 5, 77, 0, 0, 63, 64, 5, 69, 0, 0, 64, 65, 5, 82, 0, 0, 65, 66, 5, 83, 0, 0, 66, 10, 1, 0, 0, 0, 67, 68, 7, 7, 0, 0, 68, 69, 7, 5, 0, 0, 69, 70, 7, 6, 0, 0, 70, 71, 7, 5, 0, 0, 71, 72, 7, 8, 0, 0, 72, 73, 7, 5, 0, 0, 73, 74, 7, 0, 0, 0, 74, 75, 7, 9, 0, 0, 75, 76, 7, 0, 0, 0, 76, 12, 1, 0, 0, 0, 77, 78, 7, 6, 0, 0, 78, 79, 7, 5, 0, 0, 79, 80, 7, 8, 0, 0, 80, 81, 7, 10, 0, 0, 81, 82, 7, 11, 0, 0, 82, 83, 7, 0, 0, 0, 83, 14, 1, 0, 0, 0, 84, 85, 7, 0, 0, 0, 85, 86, 7, 11, 0, 0, 86, 87, 7, 10, 0, 0, 87, 88, 7, 11, 0, 0, 88, 89, 7, 12, 0, 0, 89, 90, 7, 6, 0, 0, 90, 16, 1, 0, 0, 0, 91, 92, 7, 13, 0, 0, 92, 93, 7, 14, 0, 0, 93, 94, 7, 2, 0, 0, 94, 95, 7, 15, 0, 0, 95, 18, 1, 0, 0, 0, 96, 97, 7, 16, 0, 0, 97, 20, 1, 0, 0, 0, 98, 99, 7, 17, 0, 0, 99, 22, 1, 0, 0, 0, 100, 106, 5, 39, 0, 0, 101, 105, 8, 18, 0, 0, 102, 103, 5, 92, 0, 0, 103, 105, 9, 0, 0, 0, 104, 101, 1, 0, 0, 0, 104, 102, 1, 0, 0, 0, 105, 108, 1, 0, 0, 0, 106, 104, 1, 0, 0, 0, 106, 107, 1, 0, 0, 0, 107, 109, 1, 0, 0, 0, 108, 106, 1, 0, 0, 0, 109, 122, 5, 39, 0, 0, 110, 116, 5, 34, 0, 0, 111, 115, 8, 19, 0, 0, 112, 113, 5, 92, 0, 0, 113, 115, 9, 0, 0, 0, 114, 111, 1, 0, 0, 0, 114, 112, 1, 0, 0, 0, 115, 118, 1, 0, 0, 0, 116, 114, 1, 0, 0, 0, 116, 117, 1, 0, 0, 0, 117, 119, 1, 0, 0, 0, 118, 116, 1, 0, 0, 0, 119, 122, 5, 34, 0, 0, 120, 122, 5, 42, 0, 0, 121, 100, 1, 0, 0, 0, 121, 110, 1, 0, 0, 0, 121, 120, 1, 0, 0, 0, 122, 24, 1, 0, 0, 0, 123, 127, 3, 21, 10, 0, 124, 127, 3, 19, 9, 0, 125, 127, 5, 95, 0, 0, 126, 123, 1, 0, 0, 0, 126, 124, 1, 0, 0, 0, 126, 125, 1, 0, 0, 0, 127, 128, 1, 0, 0, 0, 128, 126, 1, 0, 0, 0, 128, 129, 1, 0, 0, 0, 129, 26, 1, 0, 0, 0, 130, 136, 5, 96, 0, 0, 131, 135, 8, 20, 0, 0, 132, 133, 5, 96, 0, 0, 133, 135, 5, 96, 0, 0, 134, 131, 1, 0, 0, 0, 134, 132, 1, 0, 0, 0, 135, 138, 1, 0, 0, 0, 136, 134, 1, 0, 0, 0, 136, 137, 1, 0, 0, 0, 137, 139, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 139, 140, 5, 96, 0, 0, 140, 28, 1, 0, 0, 0, 141, 142, 5, 45, 0, 0, 142, 143, 5, 45, 0, 0, 143, 147, 1, 0, 0, 0, 144, 146, 8, 21, 0, 0, 145, 144, 1, 0, 0, 0, 146, 149, 1, 0, 0, 0, 147, 145, 1, 0, 0, 0, 147, 148, 1, 0, 0, 0, 148, 151, 1, 0, 0, 0, 149, 147, 1, 0, 0, 0, 150, 152, 5, 13, 0, 0, 151, 150, 1, 0, 0, 0, 151, 152, 1, 0, 0, 0, 152, 154, 1, 0, 0, 0, 153, 155, 5, 10, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 6, 14, 0, 0, 157, 30, 1, 0, 0, 0, 158, 159, 5, 47, 0, 0, 159, 160, 5, 42, 0, 0, 160, 161, 5, 42, 0, 0, 161, 162, 5, 47, 0, 0, 162, 163, 1, 0, 0, 0, 163, 164, 6, 15, 0, 0, 164, 32, 1, 0, 0, 0, 165, 166, 5, 47, 0, 0, 166, 167, 5, 42, 0, 0, 167, 168, 1, 0, 0, 0, 168, 172, 8, 22, 0, 0, 169, 171, 9, 0, 0, 0, 170, 169, 1, 0, 0, 0, 171, 174, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 175, 1, 0, 
0, 0, 174, 172, 1, 0, 0, 0, 175, 176, 5, 42, 0, 0, 176, 177, 5, 47, 0, 0, 177, 178, 1, 0, 0, 0, 178, 179, 6, 16, 0, 0, 179, 34, 1, 0, 0, 0, 180, 182, 7, 23, 0, 0, 181, 180, 1, 0, 0, 0, 182, 183, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 6, 17, 0, 0, 186, 36, 1, 0, 0, 0, 15, 0, 104, 106, 114, 116, 121, 126, 128, 134, 136, 147, 151, 154, 172, 183, 1, 0, 1, 0] \ No newline at end of file diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseLexer.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseLexer.java deleted file mode 100644 index b7fe9696af..0000000000 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseLexer.java +++ /dev/null @@ -1,243 +0,0 @@ -// Generated from io/edurt/datacap/sql/parser/SqlBase.g4 by ANTLR 4.12.0 -package io.edurt.datacap.sql.parser; -import org.antlr.v4.runtime.Lexer; -import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.Token; -import org.antlr.v4.runtime.TokenStream; -import org.antlr.v4.runtime.*; -import org.antlr.v4.runtime.atn.*; -import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.misc.*; - -@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue"}) -public class SqlBaseLexer extends Lexer { - static { RuntimeMetaData.checkVersion("4.12.0", RuntimeMetaData.VERSION); } - - protected static final DFA[] _decisionToDFA; - protected static final PredictionContextCache _sharedContextCache = - new PredictionContextCache(); - public static final int - T__0=1, SHOW=2, PATHS=3, TOPICS=4, CONSUMERS=5, DATABASES=6, TABLES=7, - SELECT=8, FROM=9, STRING=10, IDENTIFIER=11, BACKQUOTED_IDENTIFIER=12, - SIMPLE_COMMENT=13, BRACKETED_EMPTY_COMMENT=14, BRACKETED_COMMENT=15, WS=16; - public static String[] channelNames = { - "DEFAULT_TOKEN_CHANNEL", "HIDDEN" - }; - - public static String[] modeNames = { - "DEFAULT_MODE" - }; - - private static String[] makeRuleNames() { - return new String[] { - "T__0", "SHOW", "PATHS", "TOPICS", "CONSUMERS", "DATABASES", "TABLES", - "SELECT", "FROM", "DIGIT", "LETTER", "STRING", "IDENTIFIER", "BACKQUOTED_IDENTIFIER", - "SIMPLE_COMMENT", "BRACKETED_EMPTY_COMMENT", "BRACKETED_COMMENT", "WS" - }; - } - public static final String[] ruleNames = makeRuleNames(); - - private static String[] makeLiteralNames() { - return new String[] { - null, "'.'", null, null, "'TOPICS'", "'CONSUMERS'", null, null, null, - null, null, null, null, null, "'/**/'" - }; - } - private static final String[] _LITERAL_NAMES = makeLiteralNames(); - private static String[] makeSymbolicNames() { - return new String[] { - null, null, "SHOW", "PATHS", "TOPICS", "CONSUMERS", "DATABASES", "TABLES", - "SELECT", "FROM", "STRING", "IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", - "BRACKETED_EMPTY_COMMENT", "BRACKETED_COMMENT", "WS" - }; - } - private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); - public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); - - /** - * @deprecated Use {@link #VOCABULARY} instead. 
- */ - @Deprecated - public static final String[] tokenNames; - static { - tokenNames = new String[_SYMBOLIC_NAMES.length]; - for (int i = 0; i < tokenNames.length; i++) { - tokenNames[i] = VOCABULARY.getLiteralName(i); - if (tokenNames[i] == null) { - tokenNames[i] = VOCABULARY.getSymbolicName(i); - } - - if (tokenNames[i] == null) { - tokenNames[i] = ""; - } - } - } - - @Override - @Deprecated - public String[] getTokenNames() { - return tokenNames; - } - - @Override - - public Vocabulary getVocabulary() { - return VOCABULARY; - } - - - public SqlBaseLexer(CharStream input) { - super(input); - _interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); - } - - @Override - public String getGrammarFileName() { return "SqlBase.g4"; } - - @Override - public String[] getRuleNames() { return ruleNames; } - - @Override - public String getSerializedATN() { return _serializedATN; } - - @Override - public String[] getChannelNames() { return channelNames; } - - @Override - public String[] getModeNames() { return modeNames; } - - @Override - public ATN getATN() { return _ATN; } - - public static final String _serializedATN = - "\u0004\u0000\u0010\u00bb\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002"+ - "\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002"+ - "\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002"+ - "\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002"+ - "\u000b\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e"+ - "\u0002\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011"+ - "\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\n\u0001"+ - "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000bi\b\u000b"+ - "\n\u000b\f\u000bl\t\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0005\u000bs\b\u000b\n\u000b\f\u000bv\t\u000b\u0001\u000b"+ - "\u0001\u000b\u0003\u000bz\b\u000b\u0001\f\u0001\f\u0001\f\u0004\f\u007f"+ - "\b\f\u000b\f\f\f\u0080\u0001\r\u0001\r\u0001\r\u0001\r\u0005\r\u0087\b"+ - "\r\n\r\f\r\u008a\t\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0005\u000e\u0092\b\u000e\n\u000e\f\u000e\u0095\t\u000e\u0001"+ - "\u000e\u0003\u000e\u0098\b\u000e\u0001\u000e\u0003\u000e\u009b\b\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0005\u0010\u00ab\b\u0010\n\u0010\f\u0010\u00ae"+ - "\t\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0011\u0004\u0011\u00b6\b\u0011\u000b\u0011\f\u0011\u00b7\u0001\u0011"+ - "\u0001\u0011\u0001\u00ac\u0000\u0012\u0001\u0001\u0003\u0002\u0005\u0003"+ - 
"\u0007\u0004\t\u0005\u000b\u0006\r\u0007\u000f\b\u0011\t\u0013\u0000\u0015"+ - "\u0000\u0017\n\u0019\u000b\u001b\f\u001d\r\u001f\u000e!\u000f#\u0010\u0001"+ - "\u0000\u0018\u0002\u0000SSss\u0002\u0000HHhh\u0002\u0000OOoo\u0002\u0000"+ - "WWww\u0002\u0000PPpp\u0002\u0000AAaa\u0002\u0000TTtt\u0002\u0000DDdd\u0002"+ - "\u0000BBbb\u0002\u0000EEss\u0002\u0000LLll\u0002\u0000EEee\u0002\u0000"+ - "CCcc\u0002\u0000FFff\u0002\u0000RRrr\u0002\u0000MMmm\u0001\u000009\u0002"+ - "\u0000AZaz\u0002\u0000\'\'\\\\\u0002\u0000\"\"\\\\\u0001\u0000``\u0002"+ - "\u0000\n\n\r\r\u0001\u0000++\u0003\u0000\t\n\r\r \u00c8\u0000\u0001\u0001"+ - "\u0000\u0000\u0000\u0000\u0003\u0001\u0000\u0000\u0000\u0000\u0005\u0001"+ - "\u0000\u0000\u0000\u0000\u0007\u0001\u0000\u0000\u0000\u0000\t\u0001\u0000"+ - "\u0000\u0000\u0000\u000b\u0001\u0000\u0000\u0000\u0000\r\u0001\u0000\u0000"+ - "\u0000\u0000\u000f\u0001\u0000\u0000\u0000\u0000\u0011\u0001\u0000\u0000"+ - "\u0000\u0000\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001\u0000\u0000"+ - "\u0000\u0000\u001b\u0001\u0000\u0000\u0000\u0000\u001d\u0001\u0000\u0000"+ - "\u0000\u0000\u001f\u0001\u0000\u0000\u0000\u0000!\u0001\u0000\u0000\u0000"+ - "\u0000#\u0001\u0000\u0000\u0000\u0001%\u0001\u0000\u0000\u0000\u0003\'"+ - "\u0001\u0000\u0000\u0000\u0005,\u0001\u0000\u0000\u0000\u00072\u0001\u0000"+ - "\u0000\u0000\t9\u0001\u0000\u0000\u0000\u000bC\u0001\u0000\u0000\u0000"+ - "\rM\u0001\u0000\u0000\u0000\u000fT\u0001\u0000\u0000\u0000\u0011[\u0001"+ - "\u0000\u0000\u0000\u0013`\u0001\u0000\u0000\u0000\u0015b\u0001\u0000\u0000"+ - "\u0000\u0017y\u0001\u0000\u0000\u0000\u0019~\u0001\u0000\u0000\u0000\u001b"+ - "\u0082\u0001\u0000\u0000\u0000\u001d\u008d\u0001\u0000\u0000\u0000\u001f"+ - "\u009e\u0001\u0000\u0000\u0000!\u00a5\u0001\u0000\u0000\u0000#\u00b5\u0001"+ - "\u0000\u0000\u0000%&\u0005.\u0000\u0000&\u0002\u0001\u0000\u0000\u0000"+ - "\'(\u0007\u0000\u0000\u0000()\u0007\u0001\u0000\u0000)*\u0007\u0002\u0000"+ - "\u0000*+\u0007\u0003\u0000\u0000+\u0004\u0001\u0000\u0000\u0000,-\u0007"+ - "\u0004\u0000\u0000-.\u0007\u0005\u0000\u0000./\u0007\u0006\u0000\u0000"+ - "/0\u0007\u0001\u0000\u000001\u0007\u0000\u0000\u00001\u0006\u0001\u0000"+ - "\u0000\u000023\u0005T\u0000\u000034\u0005O\u0000\u000045\u0005P\u0000"+ - "\u000056\u0005I\u0000\u000067\u0005C\u0000\u000078\u0005S\u0000\u0000"+ - "8\b\u0001\u0000\u0000\u00009:\u0005C\u0000\u0000:;\u0005O\u0000\u0000"+ - ";<\u0005N\u0000\u0000<=\u0005S\u0000\u0000=>\u0005U\u0000\u0000>?\u0005"+ - "M\u0000\u0000?@\u0005E\u0000\u0000@A\u0005R\u0000\u0000AB\u0005S\u0000"+ - "\u0000B\n\u0001\u0000\u0000\u0000CD\u0007\u0007\u0000\u0000DE\u0007\u0005"+ - "\u0000\u0000EF\u0007\u0006\u0000\u0000FG\u0007\u0005\u0000\u0000GH\u0007"+ - "\b\u0000\u0000HI\u0007\u0005\u0000\u0000IJ\u0007\u0000\u0000\u0000JK\u0007"+ - "\t\u0000\u0000KL\u0007\u0000\u0000\u0000L\f\u0001\u0000\u0000\u0000MN"+ - "\u0007\u0006\u0000\u0000NO\u0007\u0005\u0000\u0000OP\u0007\b\u0000\u0000"+ - "PQ\u0007\n\u0000\u0000QR\u0007\u000b\u0000\u0000RS\u0007\u0000\u0000\u0000"+ - "S\u000e\u0001\u0000\u0000\u0000TU\u0007\u0000\u0000\u0000UV\u0007\u000b"+ - "\u0000\u0000VW\u0007\n\u0000\u0000WX\u0007\u000b\u0000\u0000XY\u0007\f"+ - "\u0000\u0000YZ\u0007\u0006\u0000\u0000Z\u0010\u0001\u0000\u0000\u0000"+ - "[\\\u0007\r\u0000\u0000\\]\u0007\u000e\u0000\u0000]^\u0007\u0002\u0000"+ - "\u0000^_\u0007\u000f\u0000\u0000_\u0012\u0001\u0000\u0000\u0000`a\u0007"+ - "\u0010\u0000\u0000a\u0014\u0001\u0000\u0000\u0000bc\u0007\u0011\u0000"+ - 
"\u0000c\u0016\u0001\u0000\u0000\u0000dj\u0005\'\u0000\u0000ei\b\u0012"+ - "\u0000\u0000fg\u0005\\\u0000\u0000gi\t\u0000\u0000\u0000he\u0001\u0000"+ - "\u0000\u0000hf\u0001\u0000\u0000\u0000il\u0001\u0000\u0000\u0000jh\u0001"+ - "\u0000\u0000\u0000jk\u0001\u0000\u0000\u0000km\u0001\u0000\u0000\u0000"+ - "lj\u0001\u0000\u0000\u0000mz\u0005\'\u0000\u0000nt\u0005\"\u0000\u0000"+ - "os\b\u0013\u0000\u0000pq\u0005\\\u0000\u0000qs\t\u0000\u0000\u0000ro\u0001"+ - "\u0000\u0000\u0000rp\u0001\u0000\u0000\u0000sv\u0001\u0000\u0000\u0000"+ - "tr\u0001\u0000\u0000\u0000tu\u0001\u0000\u0000\u0000uw\u0001\u0000\u0000"+ - "\u0000vt\u0001\u0000\u0000\u0000wz\u0005\"\u0000\u0000xz\u0005*\u0000"+ - "\u0000yd\u0001\u0000\u0000\u0000yn\u0001\u0000\u0000\u0000yx\u0001\u0000"+ - "\u0000\u0000z\u0018\u0001\u0000\u0000\u0000{\u007f\u0003\u0015\n\u0000"+ - "|\u007f\u0003\u0013\t\u0000}\u007f\u0005_\u0000\u0000~{\u0001\u0000\u0000"+ - "\u0000~|\u0001\u0000\u0000\u0000~}\u0001\u0000\u0000\u0000\u007f\u0080"+ - "\u0001\u0000\u0000\u0000\u0080~\u0001\u0000\u0000\u0000\u0080\u0081\u0001"+ - "\u0000\u0000\u0000\u0081\u001a\u0001\u0000\u0000\u0000\u0082\u0088\u0005"+ - "`\u0000\u0000\u0083\u0087\b\u0014\u0000\u0000\u0084\u0085\u0005`\u0000"+ - "\u0000\u0085\u0087\u0005`\u0000\u0000\u0086\u0083\u0001\u0000\u0000\u0000"+ - "\u0086\u0084\u0001\u0000\u0000\u0000\u0087\u008a\u0001\u0000\u0000\u0000"+ - "\u0088\u0086\u0001\u0000\u0000\u0000\u0088\u0089\u0001\u0000\u0000\u0000"+ - "\u0089\u008b\u0001\u0000\u0000\u0000\u008a\u0088\u0001\u0000\u0000\u0000"+ - "\u008b\u008c\u0005`\u0000\u0000\u008c\u001c\u0001\u0000\u0000\u0000\u008d"+ - "\u008e\u0005-\u0000\u0000\u008e\u008f\u0005-\u0000\u0000\u008f\u0093\u0001"+ - "\u0000\u0000\u0000\u0090\u0092\b\u0015\u0000\u0000\u0091\u0090\u0001\u0000"+ - "\u0000\u0000\u0092\u0095\u0001\u0000\u0000\u0000\u0093\u0091\u0001\u0000"+ - "\u0000\u0000\u0093\u0094\u0001\u0000\u0000\u0000\u0094\u0097\u0001\u0000"+ - "\u0000\u0000\u0095\u0093\u0001\u0000\u0000\u0000\u0096\u0098\u0005\r\u0000"+ - "\u0000\u0097\u0096\u0001\u0000\u0000\u0000\u0097\u0098\u0001\u0000\u0000"+ - "\u0000\u0098\u009a\u0001\u0000\u0000\u0000\u0099\u009b\u0005\n\u0000\u0000"+ - "\u009a\u0099\u0001\u0000\u0000\u0000\u009a\u009b\u0001\u0000\u0000\u0000"+ - "\u009b\u009c\u0001\u0000\u0000\u0000\u009c\u009d\u0006\u000e\u0000\u0000"+ - "\u009d\u001e\u0001\u0000\u0000\u0000\u009e\u009f\u0005/\u0000\u0000\u009f"+ - "\u00a0\u0005*\u0000\u0000\u00a0\u00a1\u0005*\u0000\u0000\u00a1\u00a2\u0005"+ - "/\u0000\u0000\u00a2\u00a3\u0001\u0000\u0000\u0000\u00a3\u00a4\u0006\u000f"+ - "\u0000\u0000\u00a4 \u0001\u0000\u0000\u0000\u00a5\u00a6\u0005/\u0000\u0000"+ - "\u00a6\u00a7\u0005*\u0000\u0000\u00a7\u00a8\u0001\u0000\u0000\u0000\u00a8"+ - "\u00ac\b\u0016\u0000\u0000\u00a9\u00ab\t\u0000\u0000\u0000\u00aa\u00a9"+ - "\u0001\u0000\u0000\u0000\u00ab\u00ae\u0001\u0000\u0000\u0000\u00ac\u00ad"+ - "\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001\u0000\u0000\u0000\u00ad\u00af"+ - "\u0001\u0000\u0000\u0000\u00ae\u00ac\u0001\u0000\u0000\u0000\u00af\u00b0"+ - "\u0005*\u0000\u0000\u00b0\u00b1\u0005/\u0000\u0000\u00b1\u00b2\u0001\u0000"+ - "\u0000\u0000\u00b2\u00b3\u0006\u0010\u0000\u0000\u00b3\"\u0001\u0000\u0000"+ - "\u0000\u00b4\u00b6\u0007\u0017\u0000\u0000\u00b5\u00b4\u0001\u0000\u0000"+ - "\u0000\u00b6\u00b7\u0001\u0000\u0000\u0000\u00b7\u00b5\u0001\u0000\u0000"+ - "\u0000\u00b7\u00b8\u0001\u0000\u0000\u0000\u00b8\u00b9\u0001\u0000\u0000"+ - "\u0000\u00b9\u00ba\u0006\u0011\u0000\u0000\u00ba$\u0001\u0000\u0000\u0000"+ - 
"\u000f\u0000hjrty~\u0080\u0086\u0088\u0093\u0097\u009a\u00ac\u00b7\u0001"+ - "\u0000\u0001\u0000"; - public static final ATN _ATN = - new ATNDeserializer().deserialize(_serializedATN.toCharArray()); - static { - _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; - for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { - _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); - } - } -} \ No newline at end of file diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseListener.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseListener.java deleted file mode 100644 index ec47bb84eb..0000000000 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseListener.java +++ /dev/null @@ -1,130 +0,0 @@ -// Generated from io/edurt/datacap/sql/parser/SqlBase.g4 by ANTLR 4.12.0 -package io.edurt.datacap.sql.parser; -import org.antlr.v4.runtime.tree.ParseTreeListener; - -/** - * This interface defines a complete listener for a parse tree produced by - * {@link SqlBaseParser}. - */ -public interface SqlBaseListener extends ParseTreeListener { - /** - * Enter a parse tree produced by {@link SqlBaseParser#singleStatement}. - * @param ctx the parse tree - */ - void enterSingleStatement(SqlBaseParser.SingleStatementContext ctx); - /** - * Exit a parse tree produced by {@link SqlBaseParser#singleStatement}. - * @param ctx the parse tree - */ - void exitSingleStatement(SqlBaseParser.SingleStatementContext ctx); - /** - * Enter a parse tree produced by {@link SqlBaseParser#statement}. - * @param ctx the parse tree - */ - void enterStatement(SqlBaseParser.StatementContext ctx); - /** - * Exit a parse tree produced by {@link SqlBaseParser#statement}. - * @param ctx the parse tree - */ - void exitStatement(SqlBaseParser.StatementContext ctx); - /** - * Enter a parse tree produced by {@link SqlBaseParser#kafkaQueryTopicStatement}. - * @param ctx the parse tree - */ - void enterKafkaQueryTopicStatement(SqlBaseParser.KafkaQueryTopicStatementContext ctx); - /** - * Exit a parse tree produced by {@link SqlBaseParser#kafkaQueryTopicStatement}. - * @param ctx the parse tree - */ - void exitKafkaQueryTopicStatement(SqlBaseParser.KafkaQueryTopicStatementContext ctx); - /** - * Enter a parse tree produced by {@link SqlBaseParser#kafkaQueryConsumerStatement}. - * @param ctx the parse tree - */ - void enterKafkaQueryConsumerStatement(SqlBaseParser.KafkaQueryConsumerStatementContext ctx); - /** - * Exit a parse tree produced by {@link SqlBaseParser#kafkaQueryConsumerStatement}. - * @param ctx the parse tree - */ - void exitKafkaQueryConsumerStatement(SqlBaseParser.KafkaQueryConsumerStatementContext ctx); - /** - * Enter a parse tree produced by {@link SqlBaseParser#kafkaQueryStatement}. - * @param ctx the parse tree - */ - void enterKafkaQueryStatement(SqlBaseParser.KafkaQueryStatementContext ctx); - /** - * Exit a parse tree produced by {@link SqlBaseParser#kafkaQueryStatement}. - * @param ctx the parse tree - */ - void exitKafkaQueryStatement(SqlBaseParser.KafkaQueryStatementContext ctx); - /** - * Enter a parse tree produced by {@link SqlBaseParser#kafkaStatement}. - * @param ctx the parse tree - */ - void enterKafkaStatement(SqlBaseParser.KafkaStatementContext ctx); - /** - * Exit a parse tree produced by {@link SqlBaseParser#kafkaStatement}. - * @param ctx the parse tree - */ - void exitKafkaStatement(SqlBaseParser.KafkaStatementContext ctx); - /** - * Enter a parse tree produced by {@link SqlBaseParser#childPathStatement}. 
- * @param ctx the parse tree - */ - void enterChildPathStatement(SqlBaseParser.ChildPathStatementContext ctx); - /** - * Exit a parse tree produced by {@link SqlBaseParser#childPathStatement}. - * @param ctx the parse tree - */ - void exitChildPathStatement(SqlBaseParser.ChildPathStatementContext ctx); - /** - * Enter a parse tree produced by {@link SqlBaseParser#columnStatement}. - * @param ctx the parse tree - */ - void enterColumnStatement(SqlBaseParser.ColumnStatementContext ctx); - /** - * Exit a parse tree produced by {@link SqlBaseParser#columnStatement}. - * @param ctx the parse tree - */ - void exitColumnStatement(SqlBaseParser.ColumnStatementContext ctx); - /** - * Enter a parse tree produced by {@link SqlBaseParser#fromClause}. - * @param ctx the parse tree - */ - void enterFromClause(SqlBaseParser.FromClauseContext ctx); - /** - * Exit a parse tree produced by {@link SqlBaseParser#fromClause}. - * @param ctx the parse tree - */ - void exitFromClause(SqlBaseParser.FromClauseContext ctx); - /** - * Enter a parse tree produced by {@link SqlBaseParser#tableName}. - * @param ctx the parse tree - */ - void enterTableName(SqlBaseParser.TableNameContext ctx); - /** - * Exit a parse tree produced by {@link SqlBaseParser#tableName}. - * @param ctx the parse tree - */ - void exitTableName(SqlBaseParser.TableNameContext ctx); - /** - * Enter a parse tree produced by {@link SqlBaseParser#identifier}. - * @param ctx the parse tree - */ - void enterIdentifier(SqlBaseParser.IdentifierContext ctx); - /** - * Exit a parse tree produced by {@link SqlBaseParser#identifier}. - * @param ctx the parse tree - */ - void exitIdentifier(SqlBaseParser.IdentifierContext ctx); - /** - * Enter a parse tree produced by {@link SqlBaseParser#quotedIdentifier}. - * @param ctx the parse tree - */ - void enterQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx); - /** - * Exit a parse tree produced by {@link SqlBaseParser#quotedIdentifier}. 
- * @param ctx the parse tree - */ - void exitQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx); -} \ No newline at end of file diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseParser.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseParser.java deleted file mode 100644 index ed18dea1e4..0000000000 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseParser.java +++ /dev/null @@ -1,911 +0,0 @@ -// Generated from io/edurt/datacap/sql/parser/SqlBase.g4 by ANTLR 4.12.0 -package io.edurt.datacap.sql.parser; -import org.antlr.v4.runtime.atn.*; -import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.*; -import org.antlr.v4.runtime.misc.*; -import org.antlr.v4.runtime.tree.*; -import java.util.List; -import java.util.Iterator; -import java.util.ArrayList; - -@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue"}) -public class SqlBaseParser extends Parser { - static { RuntimeMetaData.checkVersion("4.12.0", RuntimeMetaData.VERSION); } - - protected static final DFA[] _decisionToDFA; - protected static final PredictionContextCache _sharedContextCache = - new PredictionContextCache(); - public static final int - T__0=1, SHOW=2, PATHS=3, TOPICS=4, CONSUMERS=5, DATABASES=6, TABLES=7, - SELECT=8, FROM=9, STRING=10, IDENTIFIER=11, BACKQUOTED_IDENTIFIER=12, - SIMPLE_COMMENT=13, BRACKETED_EMPTY_COMMENT=14, BRACKETED_COMMENT=15, WS=16; - public static final int - RULE_singleStatement = 0, RULE_statement = 1, RULE_kafkaQueryTopicStatement = 2, - RULE_kafkaQueryConsumerStatement = 3, RULE_kafkaQueryStatement = 4, RULE_kafkaStatement = 5, - RULE_childPathStatement = 6, RULE_columnStatement = 7, RULE_fromClause = 8, - RULE_tableName = 9, RULE_identifier = 10, RULE_quotedIdentifier = 11; - private static String[] makeRuleNames() { - return new String[] { - "singleStatement", "statement", "kafkaQueryTopicStatement", "kafkaQueryConsumerStatement", - "kafkaQueryStatement", "kafkaStatement", "childPathStatement", "columnStatement", - "fromClause", "tableName", "identifier", "quotedIdentifier" - }; - } - public static final String[] ruleNames = makeRuleNames(); - - private static String[] makeLiteralNames() { - return new String[] { - null, "'.'", null, null, "'TOPICS'", "'CONSUMERS'", null, null, null, - null, null, null, null, null, "'/**/'" - }; - } - private static final String[] _LITERAL_NAMES = makeLiteralNames(); - private static String[] makeSymbolicNames() { - return new String[] { - null, null, "SHOW", "PATHS", "TOPICS", "CONSUMERS", "DATABASES", "TABLES", - "SELECT", "FROM", "STRING", "IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", - "BRACKETED_EMPTY_COMMENT", "BRACKETED_COMMENT", "WS" - }; - } - private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); - public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); - - /** - * @deprecated Use {@link #VOCABULARY} instead. 
- */ - @Deprecated - public static final String[] tokenNames; - static { - tokenNames = new String[_SYMBOLIC_NAMES.length]; - for (int i = 0; i < tokenNames.length; i++) { - tokenNames[i] = VOCABULARY.getLiteralName(i); - if (tokenNames[i] == null) { - tokenNames[i] = VOCABULARY.getSymbolicName(i); - } - - if (tokenNames[i] == null) { - tokenNames[i] = ""; - } - } - } - - @Override - @Deprecated - public String[] getTokenNames() { - return tokenNames; - } - - @Override - - public Vocabulary getVocabulary() { - return VOCABULARY; - } - - @Override - public String getGrammarFileName() { return "SqlBase.g4"; } - - @Override - public String[] getRuleNames() { return ruleNames; } - - @Override - public String getSerializedATN() { return _serializedATN; } - - @Override - public ATN getATN() { return _ATN; } - - public SqlBaseParser(TokenStream input) { - super(input); - _interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); - } - - @SuppressWarnings("CheckReturnValue") - public static class SingleStatementContext extends ParserRuleContext { - public List statement() { - return getRuleContexts(StatementContext.class); - } - public StatementContext statement(int i) { - return getRuleContext(StatementContext.class,i); - } - public SingleStatementContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_singleStatement; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterSingleStatement(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitSingleStatement(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitSingleStatement(this); - else return visitor.visitChildren(this); - } - } - - public final SingleStatementContext singleStatement() throws RecognitionException { - SingleStatementContext _localctx = new SingleStatementContext(_ctx, getState()); - enterRule(_localctx, 0, RULE_singleStatement); - int _la; - try { - enterOuterAlt(_localctx, 1); - { - setState(27); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==SHOW || _la==SELECT) { - { - { - setState(24); - statement(); - } - } - setState(29); - _errHandler.sync(this); - _la = _input.LA(1); - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class StatementContext extends ParserRuleContext { - public TerminalNode SHOW() { return getToken(SqlBaseParser.SHOW, 0); } - public ChildPathStatementContext childPathStatement() { - return getRuleContext(ChildPathStatementContext.class,0); - } - public TerminalNode SELECT() { return getToken(SqlBaseParser.SELECT, 0); } - public ColumnStatementContext columnStatement() { - return getRuleContext(ColumnStatementContext.class,0); - } - public FromClauseContext fromClause() { - return getRuleContext(FromClauseContext.class,0); - } - public KafkaStatementContext kafkaStatement() { - return getRuleContext(KafkaStatementContext.class,0); - } - public StatementContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return 
RULE_statement; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterStatement(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitStatement(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitStatement(this); - else return visitor.visitChildren(this); - } - } - - public final StatementContext statement() throws RecognitionException { - StatementContext _localctx = new StatementContext(_ctx, getState()); - enterRule(_localctx, 2, RULE_statement); - try { - setState(37); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(30); - match(SHOW); - setState(31); - childPathStatement(); - } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(32); - match(SELECT); - setState(33); - columnStatement(); - setState(34); - fromClause(); - } - break; - case 3: - enterOuterAlt(_localctx, 3); - { - setState(36); - kafkaStatement(); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class KafkaQueryTopicStatementContext extends ParserRuleContext { - public TerminalNode SHOW() { return getToken(SqlBaseParser.SHOW, 0); } - public TerminalNode TOPICS() { return getToken(SqlBaseParser.TOPICS, 0); } - public TerminalNode DATABASES() { return getToken(SqlBaseParser.DATABASES, 0); } - public KafkaQueryTopicStatementContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_kafkaQueryTopicStatement; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterKafkaQueryTopicStatement(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitKafkaQueryTopicStatement(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitKafkaQueryTopicStatement(this); - else return visitor.visitChildren(this); - } - } - - public final KafkaQueryTopicStatementContext kafkaQueryTopicStatement() throws RecognitionException { - KafkaQueryTopicStatementContext _localctx = new KafkaQueryTopicStatementContext(_ctx, getState()); - enterRule(_localctx, 4, RULE_kafkaQueryTopicStatement); - try { - setState(43); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(39); - match(SHOW); - setState(40); - match(TOPICS); - } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(41); - match(SHOW); - setState(42); - match(DATABASES); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class KafkaQueryConsumerStatementContext extends ParserRuleContext { - public TerminalNode 
SHOW() { return getToken(SqlBaseParser.SHOW, 0); } - public TerminalNode CONSUMERS() { return getToken(SqlBaseParser.CONSUMERS, 0); } - public FromClauseContext fromClause() { - return getRuleContext(FromClauseContext.class,0); - } - public TerminalNode TABLES() { return getToken(SqlBaseParser.TABLES, 0); } - public KafkaQueryConsumerStatementContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_kafkaQueryConsumerStatement; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterKafkaQueryConsumerStatement(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitKafkaQueryConsumerStatement(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitKafkaQueryConsumerStatement(this); - else return visitor.visitChildren(this); - } - } - - public final KafkaQueryConsumerStatementContext kafkaQueryConsumerStatement() throws RecognitionException { - KafkaQueryConsumerStatementContext _localctx = new KafkaQueryConsumerStatementContext(_ctx, getState()); - enterRule(_localctx, 6, RULE_kafkaQueryConsumerStatement); - try { - setState(55); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,3,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(45); - match(SHOW); - setState(46); - match(CONSUMERS); - } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(47); - match(SHOW); - setState(48); - match(CONSUMERS); - setState(49); - fromClause(); - } - break; - case 3: - enterOuterAlt(_localctx, 3); - { - setState(50); - match(SHOW); - setState(51); - match(TABLES); - } - break; - case 4: - enterOuterAlt(_localctx, 4); - { - setState(52); - match(SHOW); - setState(53); - match(TABLES); - setState(54); - fromClause(); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class KafkaQueryStatementContext extends ParserRuleContext { - public KafkaQueryTopicStatementContext kafkaQueryTopicStatement() { - return getRuleContext(KafkaQueryTopicStatementContext.class,0); - } - public KafkaQueryConsumerStatementContext kafkaQueryConsumerStatement() { - return getRuleContext(KafkaQueryConsumerStatementContext.class,0); - } - public KafkaQueryStatementContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_kafkaQueryStatement; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterKafkaQueryStatement(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitKafkaQueryStatement(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitKafkaQueryStatement(this); - else return visitor.visitChildren(this); - } - } - - public final KafkaQueryStatementContext kafkaQueryStatement() throws RecognitionException { - KafkaQueryStatementContext 
_localctx = new KafkaQueryStatementContext(_ctx, getState()); - enterRule(_localctx, 8, RULE_kafkaQueryStatement); - try { - setState(59); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(57); - kafkaQueryTopicStatement(); - } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(58); - kafkaQueryConsumerStatement(); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class KafkaStatementContext extends ParserRuleContext { - public KafkaQueryStatementContext kafkaQueryStatement() { - return getRuleContext(KafkaQueryStatementContext.class,0); - } - public KafkaStatementContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_kafkaStatement; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterKafkaStatement(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitKafkaStatement(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitKafkaStatement(this); - else return visitor.visitChildren(this); - } - } - - public final KafkaStatementContext kafkaStatement() throws RecognitionException { - KafkaStatementContext _localctx = new KafkaStatementContext(_ctx, getState()); - enterRule(_localctx, 10, RULE_kafkaStatement); - try { - enterOuterAlt(_localctx, 1); - { - setState(61); - kafkaQueryStatement(); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class ChildPathStatementContext extends ParserRuleContext { - public TerminalNode PATHS() { return getToken(SqlBaseParser.PATHS, 0); } - public FromClauseContext fromClause() { - return getRuleContext(FromClauseContext.class,0); - } - public ChildPathStatementContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_childPathStatement; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterChildPathStatement(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitChildPathStatement(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitChildPathStatement(this); - else return visitor.visitChildren(this); - } - } - - public final ChildPathStatementContext childPathStatement() throws RecognitionException { - ChildPathStatementContext _localctx = new ChildPathStatementContext(_ctx, getState()); - enterRule(_localctx, 12, RULE_childPathStatement); - try { - setState(66); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - 
setState(63); - match(PATHS); - } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(64); - match(PATHS); - setState(65); - fromClause(); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class ColumnStatementContext extends ParserRuleContext { - public IdentifierContext identifier() { - return getRuleContext(IdentifierContext.class,0); - } - public ColumnStatementContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_columnStatement; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterColumnStatement(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitColumnStatement(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitColumnStatement(this); - else return visitor.visitChildren(this); - } - } - - public final ColumnStatementContext columnStatement() throws RecognitionException { - ColumnStatementContext _localctx = new ColumnStatementContext(_ctx, getState()); - enterRule(_localctx, 14, RULE_columnStatement); - try { - enterOuterAlt(_localctx, 1); - { - setState(68); - identifier(); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class FromClauseContext extends ParserRuleContext { - public TerminalNode FROM() { return getToken(SqlBaseParser.FROM, 0); } - public TableNameContext tableName() { - return getRuleContext(TableNameContext.class,0); - } - public FromClauseContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_fromClause; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterFromClause(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitFromClause(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitFromClause(this); - else return visitor.visitChildren(this); - } - } - - public final FromClauseContext fromClause() throws RecognitionException { - FromClauseContext _localctx = new FromClauseContext(_ctx, getState()); - enterRule(_localctx, 16, RULE_fromClause); - try { - enterOuterAlt(_localctx, 1); - { - setState(70); - match(FROM); - setState(71); - tableName(); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class TableNameContext extends ParserRuleContext { - public List identifier() { - return getRuleContexts(IdentifierContext.class); - } - public IdentifierContext identifier(int i) { - return 
getRuleContext(IdentifierContext.class,i); - } - public TableNameContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_tableName; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterTableName(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitTableName(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitTableName(this); - else return visitor.visitChildren(this); - } - } - - public final TableNameContext tableName() throws RecognitionException { - TableNameContext _localctx = new TableNameContext(_ctx, getState()); - enterRule(_localctx, 18, RULE_tableName); - int _la; - try { - enterOuterAlt(_localctx, 1); - { - setState(73); - identifier(); - setState(78); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==T__0) { - { - { - setState(74); - match(T__0); - setState(75); - identifier(); - } - } - setState(80); - _errHandler.sync(this); - _la = _input.LA(1); - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class IdentifierContext extends ParserRuleContext { - public List IDENTIFIER() { return getTokens(SqlBaseParser.IDENTIFIER); } - public TerminalNode IDENTIFIER(int i) { - return getToken(SqlBaseParser.IDENTIFIER, i); - } - public List STRING() { return getTokens(SqlBaseParser.STRING); } - public TerminalNode STRING(int i) { - return getToken(SqlBaseParser.STRING, i); - } - public List quotedIdentifier() { - return getRuleContexts(QuotedIdentifierContext.class); - } - public QuotedIdentifierContext quotedIdentifier(int i) { - return getRuleContext(QuotedIdentifierContext.class,i); - } - public IdentifierContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_identifier; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterIdentifier(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitIdentifier(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitIdentifier(this); - else return visitor.visitChildren(this); - } - } - - public final IdentifierContext identifier() throws RecognitionException { - IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 20, RULE_identifier); - int _la; - try { - enterOuterAlt(_localctx, 1); - { - setState(86); - _errHandler.sync(this); - _la = _input.LA(1); - while ((((_la) & ~0x3f) == 0 && ((1L << _la) & 7168L) != 0)) { - { - setState(84); - _errHandler.sync(this); - switch (_input.LA(1)) { - case IDENTIFIER: - { - setState(81); - match(IDENTIFIER); - } - break; - case STRING: - { - setState(82); - match(STRING); - } - break; - case BACKQUOTED_IDENTIFIER: - { - setState(83); - quotedIdentifier(); - } - break; - default: - throw new NoViableAltException(this); 
- } - } - setState(88); - _errHandler.sync(this); - _la = _input.LA(1); - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - @SuppressWarnings("CheckReturnValue") - public static class QuotedIdentifierContext extends ParserRuleContext { - public TerminalNode BACKQUOTED_IDENTIFIER() { return getToken(SqlBaseParser.BACKQUOTED_IDENTIFIER, 0); } - public QuotedIdentifierContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_quotedIdentifier; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterQuotedIdentifier(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitQuotedIdentifier(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitQuotedIdentifier(this); - else return visitor.visitChildren(this); - } - } - - public final QuotedIdentifierContext quotedIdentifier() throws RecognitionException { - QuotedIdentifierContext _localctx = new QuotedIdentifierContext(_ctx, getState()); - enterRule(_localctx, 22, RULE_quotedIdentifier); - try { - enterOuterAlt(_localctx, 1); - { - setState(89); - match(BACKQUOTED_IDENTIFIER); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static final String _serializedATN = - "\u0004\u0001\u0010\\\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ - "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ - "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ - "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0001"+ - "\u0000\u0005\u0000\u001a\b\u0000\n\u0000\f\u0000\u001d\t\u0000\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0003\u0001&\b\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0003\u0002,\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0003\u00038\b\u0003\u0001\u0004\u0001\u0004\u0003\u0004<\b\u0004\u0001"+ - "\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006C\b"+ - "\u0006\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t"+ - "\u0001\t\u0005\tM\b\t\n\t\f\tP\t\t\u0001\n\u0001\n\u0001\n\u0005\nU\b"+ - "\n\n\n\f\nX\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0000\u0000\f\u0000"+ - "\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0000\u0000\\\u0000"+ - "\u001b\u0001\u0000\u0000\u0000\u0002%\u0001\u0000\u0000\u0000\u0004+\u0001"+ - "\u0000\u0000\u0000\u00067\u0001\u0000\u0000\u0000\b;\u0001\u0000\u0000"+ - "\u0000\n=\u0001\u0000\u0000\u0000\fB\u0001\u0000\u0000\u0000\u000eD\u0001"+ - "\u0000\u0000\u0000\u0010F\u0001\u0000\u0000\u0000\u0012I\u0001\u0000\u0000"+ - "\u0000\u0014V\u0001\u0000\u0000\u0000\u0016Y\u0001\u0000\u0000\u0000\u0018"+ - "\u001a\u0003\u0002\u0001\u0000\u0019\u0018\u0001\u0000\u0000\u0000\u001a"+ - "\u001d\u0001\u0000\u0000\u0000\u001b\u0019\u0001\u0000\u0000\u0000\u001b"+ - 
"\u001c\u0001\u0000\u0000\u0000\u001c\u0001\u0001\u0000\u0000\u0000\u001d"+ - "\u001b\u0001\u0000\u0000\u0000\u001e\u001f\u0005\u0002\u0000\u0000\u001f"+ - "&\u0003\f\u0006\u0000 !\u0005\b\u0000\u0000!\"\u0003\u000e\u0007\u0000"+ - "\"#\u0003\u0010\b\u0000#&\u0001\u0000\u0000\u0000$&\u0003\n\u0005\u0000"+ - "%\u001e\u0001\u0000\u0000\u0000% \u0001\u0000\u0000\u0000%$\u0001\u0000"+ - "\u0000\u0000&\u0003\u0001\u0000\u0000\u0000\'(\u0005\u0002\u0000\u0000"+ - "(,\u0005\u0004\u0000\u0000)*\u0005\u0002\u0000\u0000*,\u0005\u0006\u0000"+ - "\u0000+\'\u0001\u0000\u0000\u0000+)\u0001\u0000\u0000\u0000,\u0005\u0001"+ - "\u0000\u0000\u0000-.\u0005\u0002\u0000\u0000.8\u0005\u0005\u0000\u0000"+ - "/0\u0005\u0002\u0000\u000001\u0005\u0005\u0000\u000018\u0003\u0010\b\u0000"+ - "23\u0005\u0002\u0000\u000038\u0005\u0007\u0000\u000045\u0005\u0002\u0000"+ - "\u000056\u0005\u0007\u0000\u000068\u0003\u0010\b\u00007-\u0001\u0000\u0000"+ - "\u00007/\u0001\u0000\u0000\u000072\u0001\u0000\u0000\u000074\u0001\u0000"+ - "\u0000\u00008\u0007\u0001\u0000\u0000\u00009<\u0003\u0004\u0002\u0000"+ - ":<\u0003\u0006\u0003\u0000;9\u0001\u0000\u0000\u0000;:\u0001\u0000\u0000"+ - "\u0000<\t\u0001\u0000\u0000\u0000=>\u0003\b\u0004\u0000>\u000b\u0001\u0000"+ - "\u0000\u0000?C\u0005\u0003\u0000\u0000@A\u0005\u0003\u0000\u0000AC\u0003"+ - "\u0010\b\u0000B?\u0001\u0000\u0000\u0000B@\u0001\u0000\u0000\u0000C\r"+ - "\u0001\u0000\u0000\u0000DE\u0003\u0014\n\u0000E\u000f\u0001\u0000\u0000"+ - "\u0000FG\u0005\t\u0000\u0000GH\u0003\u0012\t\u0000H\u0011\u0001\u0000"+ - "\u0000\u0000IN\u0003\u0014\n\u0000JK\u0005\u0001\u0000\u0000KM\u0003\u0014"+ - "\n\u0000LJ\u0001\u0000\u0000\u0000MP\u0001\u0000\u0000\u0000NL\u0001\u0000"+ - "\u0000\u0000NO\u0001\u0000\u0000\u0000O\u0013\u0001\u0000\u0000\u0000"+ - "PN\u0001\u0000\u0000\u0000QU\u0005\u000b\u0000\u0000RU\u0005\n\u0000\u0000"+ - "SU\u0003\u0016\u000b\u0000TQ\u0001\u0000\u0000\u0000TR\u0001\u0000\u0000"+ - "\u0000TS\u0001\u0000\u0000\u0000UX\u0001\u0000\u0000\u0000VT\u0001\u0000"+ - "\u0000\u0000VW\u0001\u0000\u0000\u0000W\u0015\u0001\u0000\u0000\u0000"+ - "XV\u0001\u0000\u0000\u0000YZ\u0005\f\u0000\u0000Z\u0017\u0001\u0000\u0000"+ - "\u0000\t\u001b%+7;BNTV"; - public static final ATN _ATN = - new ATNDeserializer().deserialize(_serializedATN.toCharArray()); - static { - _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; - for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { - _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); - } - } -} \ No newline at end of file diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseVisitor.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseVisitor.java deleted file mode 100644 index af88b64f76..0000000000 --- a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/parser/SqlBaseVisitor.java +++ /dev/null @@ -1,85 +0,0 @@ -// Generated from io/edurt/datacap/sql/parser/SqlBase.g4 by ANTLR 4.12.0 -package io.edurt.datacap.sql.parser; -import org.antlr.v4.runtime.tree.ParseTreeVisitor; - -/** - * This interface defines a complete generic visitor for a parse tree produced - * by {@link SqlBaseParser}. - * - * @param The return type of the visit operation. Use {@link Void} for - * operations with no return type. - */ -public interface SqlBaseVisitor extends ParseTreeVisitor { - /** - * Visit a parse tree produced by {@link SqlBaseParser#singleStatement}. 
- * @param ctx the parse tree - * @return the visitor result - */ - T visitSingleStatement(SqlBaseParser.SingleStatementContext ctx); - /** - * Visit a parse tree produced by {@link SqlBaseParser#statement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitStatement(SqlBaseParser.StatementContext ctx); - /** - * Visit a parse tree produced by {@link SqlBaseParser#kafkaQueryTopicStatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitKafkaQueryTopicStatement(SqlBaseParser.KafkaQueryTopicStatementContext ctx); - /** - * Visit a parse tree produced by {@link SqlBaseParser#kafkaQueryConsumerStatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitKafkaQueryConsumerStatement(SqlBaseParser.KafkaQueryConsumerStatementContext ctx); - /** - * Visit a parse tree produced by {@link SqlBaseParser#kafkaQueryStatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitKafkaQueryStatement(SqlBaseParser.KafkaQueryStatementContext ctx); - /** - * Visit a parse tree produced by {@link SqlBaseParser#kafkaStatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitKafkaStatement(SqlBaseParser.KafkaStatementContext ctx); - /** - * Visit a parse tree produced by {@link SqlBaseParser#childPathStatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitChildPathStatement(SqlBaseParser.ChildPathStatementContext ctx); - /** - * Visit a parse tree produced by {@link SqlBaseParser#columnStatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitColumnStatement(SqlBaseParser.ColumnStatementContext ctx); - /** - * Visit a parse tree produced by {@link SqlBaseParser#fromClause}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitFromClause(SqlBaseParser.FromClauseContext ctx); - /** - * Visit a parse tree produced by {@link SqlBaseParser#tableName}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitTableName(SqlBaseParser.TableNameContext ctx); - /** - * Visit a parse tree produced by {@link SqlBaseParser#identifier}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitIdentifier(SqlBaseParser.IdentifierContext ctx); - /** - * Visit a parse tree produced by {@link SqlBaseParser#quotedIdentifier}. 
- * @param ctx the parse tree - * @return the visitor result - */ - T visitQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx); -} \ No newline at end of file diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/processor/ExpressionProcessor.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/processor/ExpressionProcessor.java new file mode 100644 index 0000000000..2bc491d6e7 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/processor/ExpressionProcessor.java @@ -0,0 +1,117 @@ +package io.edurt.datacap.sql.processor; + +import io.edurt.datacap.sql.node.Expression; +import io.edurt.datacap.sql.parser.SqlBaseBaseVisitor; +import io.edurt.datacap.sql.parser.SqlBaseParser; + +import java.util.ArrayList; +import java.util.List; + +public class ExpressionProcessor + extends SqlBaseBaseVisitor +{ + @Override + public Expression visitAndExpression(SqlBaseParser.AndExpressionContext ctx) + { + Expression expr = new Expression(); + expr.setType(Expression.ExpressionType.BINARY_OP); + expr.setValue("AND"); + + List children = new ArrayList<>(); + children.add(visit(ctx.expression(0))); + children.add(visit(ctx.expression(1))); + expr.setChildren(children); + + return expr; + } + + @Override + public Expression visitOrExpression(SqlBaseParser.OrExpressionContext ctx) + { + Expression expr = new Expression(); + expr.setType(Expression.ExpressionType.BINARY_OP); + expr.setValue("OR"); + + List children = new ArrayList<>(); + children.add(visit(ctx.expression(0))); + children.add(visit(ctx.expression(1))); + expr.setChildren(children); + + return expr; + } + + @Override + public Expression visitComparisonExpression(SqlBaseParser.ComparisonExpressionContext ctx) + { + Expression expr = new Expression(); + expr.setType(Expression.ExpressionType.BINARY_OP); + expr.setValue(ctx.comparisonOperator().getText()); + + List children = new ArrayList<>(); + children.add(visit(ctx.expression(0))); + children.add(visit(ctx.expression(1))); + expr.setChildren(children); + + return expr; + } + + @Override + public Expression visitColumnReferencePrimary(SqlBaseParser.ColumnReferencePrimaryContext ctx) + { + Expression expr = new Expression(); + expr.setType(Expression.ExpressionType.COLUMN_REFERENCE); + expr.setValue(ctx.columnReference().getText()); + return expr; + } + + @Override + public Expression visitLiteralPrimary(SqlBaseParser.LiteralPrimaryContext ctx) + { + Expression expr = new Expression(); + expr.setType(Expression.ExpressionType.LITERAL); + expr.setValue(ctx.literal().getText()); + return expr; + } + + @Override + public Expression visitParenExpression(SqlBaseParser.ParenExpressionContext ctx) + { + return visit(ctx.expression()); + } + + @Override + public Expression visitFunctionCallPrimary(SqlBaseParser.FunctionCallPrimaryContext ctx) + { + Expression expr = new Expression(); + + // 检查是否是 VERSION 函数 + // Check if it is a VERSION function + if (ctx.functionCall().VERSION() != null) { + expr.setType(Expression.ExpressionType.FUNCTION); + expr.setValue("VERSION"); + return expr; + } + + expr.setType(Expression.ExpressionType.FUNCTION); + expr.setValue(ctx.functionCall().functionName().getText()); + + // 直接获取函数参数的文本表示,而不是创建子表达式 + // Directly get the text representation of function parameters, instead of creating child expressions + if (ctx.functionCall().expression() != null && !ctx.functionCall().expression().isEmpty()) { + SqlBaseParser.ExpressionContext firstArg = ctx.functionCall().expression(0); + String columnRef = firstArg.getText(); + + // 
创建一个单独的 COLUMN_REFERENCE 表达式 + // Create a separate COLUMN_REFERENCE expression + Expression columnExpr = new Expression(); + columnExpr.setType(Expression.ExpressionType.COLUMN_REFERENCE); + columnExpr.setValue(columnRef); + + List args = new ArrayList<>(); + args.add(columnExpr); + expr.setChildren(args); + } + + return expr; + } +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/processor/ShowProcessor.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/processor/ShowProcessor.java new file mode 100644 index 0000000000..3b336c67c4 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/processor/ShowProcessor.java @@ -0,0 +1,87 @@ +package io.edurt.datacap.sql.processor; + +import io.edurt.datacap.sql.parser.SqlBaseParser; +import io.edurt.datacap.sql.statement.ShowStatement; + +public class ShowProcessor +{ + private final ExpressionProcessor expressionProcessor; + + public ShowProcessor() + { + this.expressionProcessor = new ExpressionProcessor(); + } + + public ShowStatement process(SqlBaseParser.ShowStatementContext ctx) + { + ShowStatement statement = new ShowStatement(); + + if (ctx.showDatabasesStatement() != null) { + processShowDatabases(statement, ctx.showDatabasesStatement()); + } + else if (ctx.showTablesStatement() != null) { + processShowTables(statement, ctx.showTablesStatement()); + } + else if (ctx.showColumnsStatement() != null) { + processShowColumns(statement, ctx.showColumnsStatement()); + } + + return statement; + } + + private void processShowDatabases(ShowStatement statement, + SqlBaseParser.ShowDatabasesStatementContext ctx) + { + statement.setShowType(ShowStatement.ShowType.DATABASES); + if (ctx.STRING() != null) { + // Remove quotes from the pattern string + String pattern = ctx.STRING().getText(); + pattern = pattern.substring(1, pattern.length() - 1); + statement.setPattern(pattern); + } + } + + private void processShowTables(ShowStatement statement, + SqlBaseParser.ShowTablesStatementContext ctx) + { + statement.setShowType(ShowStatement.ShowType.TABLES); + + if (ctx.databaseName() != null) { + statement.setDatabaseName(ctx.databaseName().getText()); + } + + if (ctx.STRING() != null) { + // Remove quotes from the pattern string + String pattern = ctx.STRING().getText(); + pattern = pattern.substring(1, pattern.length() - 1); + statement.setPattern(pattern); + } + else if (ctx.expression() != null) { + statement.setWhereCondition(expressionProcessor.visit(ctx.expression())); + } + } + + private void processShowColumns(ShowStatement statement, + SqlBaseParser.ShowColumnsStatementContext ctx) + { + statement.setShowType(ShowStatement.ShowType.COLUMNS); + + if (ctx.tableName() != null) { + statement.setTableName(ctx.tableName().getText()); + } + + if (ctx.databaseName() != null) { + statement.setDatabaseName(ctx.databaseName().getText()); + } + + if (ctx.STRING() != null) { + // Remove quotes from the pattern string + String pattern = ctx.STRING().getText(); + pattern = pattern.substring(1, pattern.length() - 1); + statement.setPattern(pattern); + } + else if (ctx.expression() != null) { + statement.setWhereCondition(expressionProcessor.visit(ctx.expression())); + } + } +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/SQLStatement.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/SQLStatement.java new file mode 100644 index 0000000000..24bff1bc31 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/SQLStatement.java @@ -0,0 
+1,21 @@ +package io.edurt.datacap.sql.statement; + +public abstract class SQLStatement +{ + private final StatementType type; + + public SQLStatement(StatementType type) + { + this.type = type; + } + + public StatementType getType() + { + return type; + } + + public enum StatementType + { + SELECT, INSERT, UPDATE, DELETE, CREATE, ALTER, DROP, USE, SHOW + } +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/SelectStatement.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/SelectStatement.java new file mode 100644 index 0000000000..8b665db765 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/SelectStatement.java @@ -0,0 +1,32 @@ +package io.edurt.datacap.sql.statement; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.node.Expression; +import io.edurt.datacap.sql.node.clause.LimitClause; +import io.edurt.datacap.sql.node.element.OrderByElement; +import io.edurt.datacap.sql.node.element.SelectElement; +import io.edurt.datacap.sql.node.element.TableElement; +import lombok.Getter; +import lombok.Setter; + +import java.util.List; + +@Getter +@Setter +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) +public class SelectStatement + extends SQLStatement +{ + private List selectElements; + private List fromSources; + private Expression whereClause; + private List groupByElements; + private Expression havingClause; + private List orderByElements; + private LimitClause limitClause; + + public SelectStatement() + { + super(StatementType.SELECT); + } +} diff --git a/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/ShowStatement.java b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/ShowStatement.java new file mode 100644 index 0000000000..080bf2d862 --- /dev/null +++ b/core/datacap-parser/src/main/java/io/edurt/datacap/sql/statement/ShowStatement.java @@ -0,0 +1,31 @@ +package io.edurt.datacap.sql.statement; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.node.Expression; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) +public class ShowStatement + extends SQLStatement +{ + private ShowType showType; + private String databaseName; + private String tableName; + private String pattern; + private Expression whereCondition; + + public ShowStatement() + { + super(StatementType.SHOW); + } + + public enum ShowType + { + DATABASES, + TABLES, + COLUMNS + } +} diff --git a/core/datacap-parser/src/test/java/io/edurt/datacap/sql/KafkaParserTest.java b/core/datacap-parser/src/test/java/io/edurt/datacap/sql/KafkaParserTest.java deleted file mode 100644 index 7f2cfadff5..0000000000 --- a/core/datacap-parser/src/test/java/io/edurt/datacap/sql/KafkaParserTest.java +++ /dev/null @@ -1,43 +0,0 @@ -package io.edurt.datacap.sql; - -import org.junit.Assert; -import org.junit.Test; - -public class KafkaParserTest -{ - private String table = "aa"; - - @Test - public void showTopic() - { - SqlBaseFormatter formatter = new SqlBaseFormatter("show topics"); - Assert.assertTrue(formatter.getParseResult().isSuccessful()); - } - - @Test - public void showConsumers() - { - SqlBaseFormatter formatter = new SqlBaseFormatter("show Consumers"); - Assert.assertTrue(formatter.getParseResult().isSuccessful()); - - formatter = new SqlBaseFormatter("show Consumers from " + table); - Assert.assertEquals(formatter.getParseResult().getTable(), 
table); - } - - @Test - public void showDatabases() - { - SqlBaseFormatter formatter = new SqlBaseFormatter("show databases"); - Assert.assertTrue(formatter.getParseResult().getToken().equals("SHOW")); - } - - @Test - public void showTables() - { - SqlBaseFormatter formatter = new SqlBaseFormatter("show tables"); - Assert.assertTrue(formatter.getParseResult().getChildToken().equals("TABLES")); - - formatter = new SqlBaseFormatter("show tables from " + table); - Assert.assertEquals(formatter.getParseResult().getTable(), table); - } -} diff --git a/core/datacap-parser/src/test/java/io/edurt/datacap/sql/ShowPathsTest.java b/core/datacap-parser/src/test/java/io/edurt/datacap/sql/ShowPathsTest.java deleted file mode 100644 index b27396678e..0000000000 --- a/core/datacap-parser/src/test/java/io/edurt/datacap/sql/ShowPathsTest.java +++ /dev/null @@ -1,26 +0,0 @@ -package io.edurt.datacap.sql; - -import org.junit.Assert; -import org.junit.Test; - -public class ShowPathsTest -{ - @Test - public void testShowPaths() - { - SqlBaseFormatter formatter = new SqlBaseFormatter("show paths"); - Assert.assertTrue(formatter.getParseResult().isSuccessful()); - - formatter = new SqlBaseFormatter("SHOW PATHS"); - Assert.assertTrue(formatter.getParseResult().isSuccessful()); - } - @Test - public void testShowPathsFrom() - { - SqlBaseFormatter formatter = new SqlBaseFormatter("show paths from aa"); - Assert.assertTrue(formatter.getParseResult().isSuccessful()); - - formatter = new SqlBaseFormatter("SHOW PATHS FROM aaa"); - Assert.assertTrue(formatter.getParseResult().isSuccessful()); - } -} \ No newline at end of file diff --git a/core/datacap-parser/src/test/java/io/edurt/datacap/sql/SqlBaseFormatterTest.java b/core/datacap-parser/src/test/java/io/edurt/datacap/sql/SqlBaseFormatterTest.java deleted file mode 100644 index 0fb988fb41..0000000000 --- a/core/datacap-parser/src/test/java/io/edurt/datacap/sql/SqlBaseFormatterTest.java +++ /dev/null @@ -1,15 +0,0 @@ -package io.edurt.datacap.sql; - -import org.junit.Assert; -import org.junit.Test; - -public class SqlBaseFormatterTest -{ - @Test - public void getParseResult() - { - String sql = "SELECT * FROM a"; - SqlBaseFormatter formatter = new SqlBaseFormatter(sql); - Assert.assertTrue(formatter.getParseResult().isSuccessful()); - } -} \ No newline at end of file diff --git a/core/datacap-plugin/pom.xml b/core/datacap-plugin/pom.xml index 5e84068948..4e8fbde2d7 100644 --- a/core/datacap-plugin/pom.xml +++ b/core/datacap-plugin/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/core/datacap-security/pom.xml b/core/datacap-security/pom.xml index 14debfbf35..01ae7308f7 100644 --- a/core/datacap-security/pom.xml +++ b/core/datacap-security/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/core/datacap-server/pom.xml b/core/datacap-server/pom.xml index 00e4010761..db69e0f4b5 100644 --- a/core/datacap-server/pom.xml +++ b/core/datacap-server/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/core/datacap-service/pom.xml b/core/datacap-service/pom.xml index 8c8263c60d..6872fa387b 100644 --- a/core/datacap-service/pom.xml +++ b/core/datacap-service/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/core/datacap-service/src/main/java/io/edurt/datacap/service/security/UserDetailsService.java 
b/core/datacap-service/src/main/java/io/edurt/datacap/service/security/UserDetailsService.java index bcbefa67bf..111b910166 100644 --- a/core/datacap-service/src/main/java/io/edurt/datacap/service/security/UserDetailsService.java +++ b/core/datacap-service/src/main/java/io/edurt/datacap/service/security/UserDetailsService.java @@ -48,6 +48,7 @@ public UserDetailsService(Long id, String code, String username, String password public static UserDetailsService build(UserEntity user) { + // TODO: Add no code alert List authorities = user.getRoles().stream() .map(role -> new SimpleGrantedAuthority(role.getCode())) .collect(Collectors.toList());
diff --git a/core/datacap-spi/pom.xml b/core/datacap-spi/pom.xml index 5d6193e7b7..8de62ecd8f 100644 --- a/core/datacap-spi/pom.xml +++ b/core/datacap-spi/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0
diff --git a/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/Parser.java b/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/Parser.java index ca813f0bcb..195b274763 100644 --- a/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/Parser.java +++ b/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/Parser.java @@ -1,10 +1,10 @@ package io.edurt.datacap.spi.parser; -import io.edurt.datacap.sql.SqlBase; +import io.edurt.datacap.sql.statement.SQLStatement; public interface Parser { - SqlBase getSqlBase(); + SQLStatement getStatement(); String getExecuteContext(); }
diff --git a/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/SqlParser.java b/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/SqlParser.java index a93a34d626..a9f9e39764 100644 --- a/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/SqlParser.java +++ b/core/datacap-spi/src/main/java/io/edurt/datacap/spi/parser/SqlParser.java @@ -1,24 +1,22 @@ package io.edurt.datacap.spi.parser; -import io.edurt.datacap.sql.SqlBase; -import io.edurt.datacap.sql.SqlBaseFormatter; +import io.edurt.datacap.sql.SQLParser; +import io.edurt.datacap.sql.statement.SQLStatement; public class SqlParser implements Parser { private final String content; - private SqlBaseFormatter formatter; public SqlParser(String content) { this.content = content; - this.formatter = new SqlBaseFormatter(this.content); } @Override - public SqlBase getSqlBase() + public SQLStatement getStatement() { - return this.formatter.getParseResult(); + return SQLParser.parse(content.trim()); } @Override
diff --git a/core/datacap-sql/pom.xml b/core/datacap-sql/pom.xml index d73b031bc0..44d449b21f 100644 --- a/core/datacap-sql/pom.xml +++ b/core/datacap-sql/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml
diff --git a/core/datacap-ui/package.json b/core/datacap-ui/package.json index 2d77efc161..db14c0d011 100644 --- a/core/datacap-ui/package.json +++ b/core/datacap-ui/package.json @@ -1,7 +1,7 @@ { "name": "datacap-ui", "description": "DataCap console", - "version": "2024.4.0", + "version": "2024.4.1-SNAPSHOT", "private": true, "scripts": { "dev": "vite",
diff --git a/docs/docs/driver/mongodb.md b/docs/docs/driver/mongodb.md new file mode 100644 index 0000000000..08123bb51d --- /dev/null +++ b/docs/docs/driver/mongodb.md @@ -0,0 +1,102 @@ +--- +title: MongoDB Driver +--- + +The DataCap MongoDB Driver is used to connect to and work with MongoDB databases from DataCap. The driver supports the following syntax: + +- `SHOW ...` syntax +- `SELECT ...` syntax + +The DataCap MongoDB Driver is available for all DataCap versions. + +## Usage + +--- + +```xml +<dependency> + <groupId>io.edurt.datacap</groupId> + <artifactId>datacap-driver-mongodb</artifactId>
+ <version>${VERSION}</version> + <scope>test</scope> +</dependency> +``` + +`VERSION` can be found in the Maven Central repository. + +Driver class name: `io.edurt.datacap.driver.MongoJdbcDriver` + +Supported connection syntax: + +- `jdbc:mongodb:` +- `jdbc:mongo:` +- `jdbc:mongodb+srv:` + +### Usage Examples + +- Authenticated user + +```java +Class.forName("io.edurt.datacap.driver.MongoJdbcDriver"); +Properties props = new Properties(); +props.setProperty("database", "xxxx"); +props.setProperty("user", "xxxx"); +props.setProperty("password", "xxxx"); + +String jdbcUrl = String.format("jdbc:mongodb://%s:%d", "127.0.0.1", 27017); +Connection connection = DriverManager.getConnection(jdbcUrl, props); +``` + +- Unauthenticated user + +```java +Class.forName("io.edurt.datacap.driver.MongoJdbcDriver"); +String jdbcUrl = String.format("jdbc:mongodb://%s:%d", "127.0.0.1", 27017); +Connection connection = DriverManager.getConnection(jdbcUrl); +``` + +## SHOW Syntax + +The DataCap MongoDB Driver supports the following SHOW syntax: + +- `SHOW DATABASES` +- `SHOW DATABASES LIKE ...` +- `SHOW TABLES` +- `SHOW TABLES FROM ...` +- `SHOW TABLES LIKE ...` +- `SHOW COLUMNS` +- `SHOW COLUMNS FROM ...` +- `SHOW COLUMNS FROM ... FROM ...` +- `SHOW COLUMNS FROM ... LIKE ...` + +## SELECT Syntax + +The DataCap MongoDB Driver supports the following SELECT syntax: + +- `SELECT * FROM ...` +- `SELECT ... FROM ...` +- `SELECT column_name AS alias_name FROM ...` +- `SELECT column_name AS alias_name, ... FROM ...` +- `SELECT column_name AS alias_name, ... FROM ... WHERE ...` +- `SELECT column_name AS alias_name, ... FROM ... WHERE ... ORDER BY ...` +- `SELECT column_name AS alias_name, ... FROM ... WHERE ... ORDER BY ... LIMIT ...` +- `SELECT column_name AS alias_name, ... FROM ... WHERE ... GROUP BY ...` +- `SELECT column_name AS alias_name, ... FROM ... WHERE ... GROUP BY ... LIMIT ... OFFSET ...` +- `SELECT column_name AS alias_name, SUM(columnName) ... FROM ... WHERE ... GROUP BY ...` + +### Aggregate Functions + +The DataCap MongoDB Driver supports the following aggregate functions: + +- `COUNT(*)` +- `COUNT(columnName)` +- `SUM(columnName)` +- `AVG(columnName)` +- `MIN(columnName)` +- `MAX(columnName)` + +## System Functions + +The DataCap MongoDB Driver supports the following system functions: + +- `SELECT VERSION()`
diff --git a/docs/docs/release/latest.en.md index 348218fb03..814972ee19 100644 --- a/docs/docs/release/latest.en.md +++ b/docs/docs/release/latest.en.md @@ -1,12 +1,19 @@ **DataCap Released!** -| Release Version | Published | -|:--------:|:------------:| -| `2024.4` | `2024-12-02` | +| Release Version | Published | +|:---------------:|:------------:| +| `2024.4.0` | `2024-12-02` | !!! note -This is a brand new version that uses a new plugin management system, new APIs, and other new features. This update is a new version and is not compatible with the previous version. Make a backup of your data before upgrading to avoid data loss. The database is compatible, as long as the upgraded SQL is executed. + This is a brand new version that uses a new plugin management system, new APIs, and other new features. This update is a new version and is not compatible with the previous version. Make a backup of your data before upgrading to avoid data loss. The database is compatible, as long as the upgraded SQL is executed.
+ It should also be noted that after upgrading, you need to change the value of the `code` field in the `datacap_user` and `datacap_role` tables so that it is unique for every row; otherwise you will not be able to log in. (If this is a clean installation, you can skip this step.)
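+ For example, a minimal sketch of that fix, assuming a MySQL backend (the upgrade SQL below uses MySQL syntax) and that any unique string the column can hold is acceptable; `UUID()` is evaluated once per row, so every row receives a distinct value: + ```sql + -- Hypothetical example: assign a distinct code to every user and role (MySQL) + UPDATE datacap_user SET code = UUID(); + UPDATE datacap_role SET code = UUID(); + ```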
+ Execute the following SQL statements to upgrade the database:
+ ```sql + INSERT INTO `datacap_menu` VALUES + (18,'全局 - 商店','STORE','','/store','',3,'VIEW',0,1,'common.store','Store',NULL,'2024-11-05 21:18:28',0,0,NULL); + INSERT INTO `datacap_role_menu_relation` VALUES ('1','18'); + ``` #### Key features @@ -24,5 +31,4 @@ This is a brand new version that uses a new plugin management system, new APIs, --- -- Added Open API documentation -- \ No newline at end of file +- Added Open API documentation \ No newline at end of file
diff --git a/docs/docs/release/latest.md index 7d5b96afa9..47d109e44d 100644 --- a/docs/docs/release/latest.md +++ b/docs/docs/release/latest.md @@ -1,12 +1,19 @@ **DataCap Released!** -| Release Version | Published | -|:--------:|:------------:| -| `2024.4` | `2024-12-02` | +| Release Version | Published | +|:----------:|:------------:| +| `2024.4.0` | `2024-12-02` | !!! note - This is a brand new version that uses a new plugin management system, new APIs, and other new features. This update is a new version and is not compatible with the previous version. Make a backup of your data before upgrading to avoid data loss. The database is compatible, as long as the upgraded SQL is executed. + This is a brand new version that uses a new plugin management system, new APIs, and other new features. This update is a new version and is not compatible with the previous version. Make a backup of your data before upgrading to avoid data loss. The database is compatible, as long as the upgraded SQL is executed.
+ It should also be noted that after upgrading, you need to change the value of the `code` field in the `datacap_user` and `datacap_role` tables so that it is unique for every row; otherwise you will not be able to log in. (If this is a clean installation, you can skip this step.)
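+ As in the English release note above, a minimal sketch assuming a MySQL backend and that any unique string the column can hold is acceptable: + ```sql + -- Hypothetical example: assign a distinct code to every user and role (MySQL) + UPDATE datacap_user SET code = UUID(); + UPDATE datacap_role SET code = UUID(); + ```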
+ Execute the following SQL statements to upgrade the database:
+ ```sql + INSERT INTO `datacap_menu` VALUES + (18,'全局 - 商店','STORE','','/store','',3,'VIEW',0,1,'common.store','Store',NULL,'2024-11-05 21:18:28',0,0,NULL); + INSERT INTO `datacap_role_menu_relation` VALUES ('1','18'); + ``` #### Key features
diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 1336cb210b..9bcf8f59cd 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -134,6 +134,7 @@ plugins: ApiPlugin: 插件 API ApiDashboard: 仪表盘 API ApiMenu: 菜单 API + NavDriver: 驱动 - locale: en name: English build: true @@ -158,6 +159,7 @@ plugins: ApiPlugin: Plugin API ApiDashboard: Dashboard API ApiMenu: Menu API + NavDriver: Driver - search - git-revision-date-localized: enable_creation_date: true @@ -309,5 +311,7 @@ nav: - api/menu/list.md - api/menu/save.md - api/menu/edit.md + - NavDriver: + - driver/mongodb.md - useCases.md - partners.md
diff --git a/driver/datacap-driver-mongo/pom.xml b/driver/datacap-driver-mongo/pom.xml index 317be37d4c..f0debc95d7 100644 --- a/driver/datacap-driver-mongo/pom.xml +++ b/driver/datacap-driver-mongo/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml
diff --git a/driver/datacap-driver-mongodb/pom.xml b/driver/datacap-driver-mongodb/pom.xml new file mode 100644 index 0000000000..f7adf59e28 --- /dev/null +++ b/driver/datacap-driver-mongodb/pom.xml @@ -0,0 +1,31 @@ + + + 4.0.0 + + io.edurt.datacap + datacap + 2024.4.1-SNAPSHOT + ../../pom.xml + + + datacap-driver-mongodb + DataCap - MongoDB - Driver + + + + org.mongodb + mongodb-driver-sync + + + io.edurt.datacap + datacap-parser + + + ch.qos.logback + logback-classic + ${logback.version} + + +
diff --git a/driver/datacap-driver-mongodb/src/main/java/com/dbschema/MongoJdbcDriver.java b/driver/datacap-driver-mongodb/src/main/java/com/dbschema/MongoJdbcDriver.java new file mode 100644 index 0000000000..aae7fc3a93 --- /dev/null +++ b/driver/datacap-driver-mongodb/src/main/java/com/dbschema/MongoJdbcDriver.java @@ -0,0 +1,20 @@ +package com.dbschema; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + +import java.sql.DriverManager; +import java.sql.SQLException; + +@SuppressFBWarnings(value = {"NM_SAME_SIMPLE_NAME_AS_SUPERCLASS"}) +public class MongoJdbcDriver + extends io.edurt.datacap.driver.MongoJdbcDriver +{ + static { + try { + DriverManager.registerDriver(new MongoJdbcDriver()); + } + catch (SQLException e) { + throw new RuntimeException("Can't register com.dbschema.MongoJdbcDriver", e); + } + } +}
diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoConnection.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoConnection.java new file mode 100644 index 0000000000..042bad6f40 --- /dev/null +++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoConnection.java @@ -0,0 +1,478 @@ +package io.edurt.datacap.driver; + +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoCredential; +import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import com.mongodb.client.MongoDatabase; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import lombok.Getter; + +import java.sql.Array; +import java.sql.Blob; +import java.sql.CallableStatement; +import java.sql.Clob; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.NClob; +import java.sql.PreparedStatement; +import java.sql.SQLClientInfoException; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import
java.sql.SQLWarning; +import java.sql.SQLXML; +import java.sql.Savepoint; +import java.sql.Statement; +import java.sql.Struct; +import java.util.Collections; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.Executor; + +@SuppressFBWarnings(value = {"CT_CONSTRUCTOR_THROW", "NP_NONNULL_RETURN_VIOLATION"}) +public class MongoConnection + implements Connection +{ + @Getter + private final MongoDatabase database; + private final MongoClient mongoClient; + private boolean isClosed = false; + + // Constructor to establish MongoDB connection + // 构造函数用于建立MongoDB连接 + public MongoConnection(String url, Properties info) + throws SQLException + { + try { + String databaseName = info.getProperty("database", "admin"); + + // 如果URL中包含认证信息,直接使用URL创建客户端 + // If the URL contains authentication information, create a client directly using the URL + if (url.contains("@")) { + this.mongoClient = MongoClients.create(url); + } + else { + // 否则检查Properties中的认证信息 + // Otherwise, check the authentication information in Properties + String username = info.getProperty("user"); + String password = info.getProperty("password"); + + if (username != null && password != null) { + // 创建认证凭证 + // Create authentication credentials + MongoCredential credential = MongoCredential.createCredential( + username, + databaseName, + password.toCharArray() + ); + + // 解析主机和端口 + // Parse host and port + String[] hostPort = url.split("://")[1].split(":"); + String host = hostPort[0]; + int port = hostPort.length > 1 ? Integer.parseInt(hostPort[1]) : 27017; + + // 创建带认证的客户端设置 + // Create client settings with authentication + MongoClientSettings settings = MongoClientSettings.builder() + .credential(credential) + .applyToClusterSettings(builder -> + builder.hosts(Collections.singletonList(new ServerAddress(host, port)))) + .build(); + + this.mongoClient = MongoClients.create(settings); + } + else { + // 无认证信息,直接连接 + // No authentication information, connect directly + // Remove jdbc: + this.mongoClient = MongoClients.create(url.substring(5)); + } + } + + this.database = mongoClient.getDatabase(databaseName); + + // 验证连接 + // Verify connection + database.runCommand(new org.bson.Document("ping", 1)); + } + catch (Exception e) { + throw new SQLException("Failed to connect to MongoDB: " + e.getMessage(), e); + } + } + + // Create statement for executing queries + // 创建用于执行查询的Statement + @Override + public Statement createStatement() + throws SQLException + { + checkClosed(); + return new MongoStatement(this); + } + + @Override + public PreparedStatement prepareStatement(String sql) + throws SQLException + { + return null; + } + + @Override + public CallableStatement prepareCall(String sql) + throws SQLException + { + return null; + } + + @Override + public String nativeSQL(String sql) + throws SQLException + { + return ""; + } + + // Check if connection is closed + // 检查连接是否已关闭 + private void checkClosed() + throws SQLException + { + if (isClosed) { + throw new SQLException("Connection is closed"); + } + } + + // Close the connection + // 关闭连接 + @Override + public void close() + { + if (!isClosed) { + mongoClient.close(); + isClosed = true; + } + } + + // Check if connection is closed + // 检查连接是否已关闭 + @Override + public boolean isClosed() + throws SQLException + { + return isClosed; + } + + @Override + public DatabaseMetaData getMetaData() + throws SQLException + { + throw new SQLFeatureNotSupportedException("Method not supported"); + } + + @Override + public void setReadOnly(boolean readOnly) + throws 
SQLException + {} + + @Override + public boolean isReadOnly() + throws SQLException + { + return false; + } + + @Override + public void setCatalog(String catalog) + throws SQLException + {} + + @Override + public String getCatalog() + throws SQLException + { + return ""; + } + + @Override + public void setTransactionIsolation(int level) + throws SQLException + {} + + @Override + public int getTransactionIsolation() + throws SQLException + { + return 0; + } + + @Override + public SQLWarning getWarnings() + throws SQLException + { + return null; + } + + @Override + public void clearWarnings() + throws SQLException + {} + + @Override + public Statement createStatement(int resultSetType, int resultSetConcurrency) + throws SQLException + { + return null; + } + + @Override + public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) + throws SQLException + { + return null; + } + + @Override + public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) + throws SQLException + { + return null; + } + + @Override + public Map> getTypeMap() + throws SQLException + { + return Map.of(); + } + + @Override + public void setTypeMap(Map> map) + throws SQLException + {} + + @Override + public void setHoldability(int holdability) + throws SQLException + {} + + @Override + public int getHoldability() + throws SQLException + { + return 0; + } + + @Override + public Savepoint setSavepoint() + throws SQLException + { + return null; + } + + @Override + public Savepoint setSavepoint(String name) + throws SQLException + { + return null; + } + + @Override + public void rollback(Savepoint savepoint) + throws SQLException + {} + + @Override + public void releaseSavepoint(Savepoint savepoint) + throws SQLException + {} + + @Override + public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException + { + return null; + } + + @Override + public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException + { + return null; + } + + @Override + public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException + { + return null; + } + + @Override + public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) + throws SQLException + { + return null; + } + + @Override + public PreparedStatement prepareStatement(String sql, int[] columnIndexes) + throws SQLException + { + return null; + } + + @Override + public PreparedStatement prepareStatement(String sql, String[] columnNames) + throws SQLException + { + return null; + } + + @Override + public Clob createClob() + throws SQLException + { + return null; + } + + @Override + public Blob createBlob() + throws SQLException + { + return null; + } + + @Override + public NClob createNClob() + throws SQLException + { + return null; + } + + @Override + public SQLXML createSQLXML() + throws SQLException + { + return null; + } + + @Override + public boolean isValid(int timeout) + throws SQLException + { + return false; + } + + @Override + public void setClientInfo(String name, String value) + throws SQLClientInfoException + {} + + @Override + public void setClientInfo(Properties properties) + throws SQLClientInfoException + {} + + @Override + public String getClientInfo(String name) + throws SQLException + { + return ""; + } + + @Override + public Properties getClientInfo() + throws 
SQLException + { + return null; + } + + @Override + public Array createArrayOf(String typeName, Object[] elements) + throws SQLException + { + return null; + } + + @Override + public Struct createStruct(String typeName, Object[] attributes) + throws SQLException + { + return null; + } + + @Override + public void setSchema(String schema) + throws SQLException + {} + + @Override + public String getSchema() + throws SQLException + { + return ""; + } + + @Override + public void abort(Executor executor) + throws SQLException + {} + + @Override + public void setNetworkTimeout(Executor executor, int milliseconds) + throws SQLException + {} + + @Override + public int getNetworkTimeout() + throws SQLException + { + return 0; + } + + @Override + public void setAutoCommit(boolean autoCommit) + throws SQLException + { + // MongoDB doesn't support transactions in the same way as relational databases + // MongoDB 不支持与关系数据库相同的事务 + throw new UnsupportedOperationException("MongoDB doesn't support transactions in the same way as relational databases"); + } + + @Override + public boolean getAutoCommit() + throws SQLException + { + return true; + } + + @Override + public void commit() + throws SQLException + {} + + @Override + public void rollback() + throws SQLException + {} + + @Override + public T unwrap(Class iface) + throws SQLException + { + return null; + } + + @Override + public boolean isWrapperFor(Class iface) + throws SQLException + { + return false; + } + + public MongoClient getClient() + { + return mongoClient; + } +} diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoJdbcDriver.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoJdbcDriver.java new file mode 100644 index 0000000000..b1d6261287 --- /dev/null +++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoJdbcDriver.java @@ -0,0 +1,93 @@ +package io.edurt.datacap.driver; + +import java.sql.Connection; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.DriverPropertyInfo; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.util.Properties; +import java.util.logging.Logger; + +public class MongoJdbcDriver + implements Driver +{ + // Static initialization of driver + // 静态初始化驱动 + static { + try { + DriverManager.registerDriver(new MongoJdbcDriver()); + } + catch (SQLException e) { + throw new RuntimeException("Can't register MongoDB JDBC Driver", e); + } + } + + // Check if this driver can handle the given URL + // 检查驱动是否可以处理给定的URL + @Override + public boolean acceptsURL(String url) + throws SQLException + { + return url != null && ( + url.startsWith("jdbc:mongo:") + || url.startsWith("jdbc:mongodb:") + || url.startsWith("jdbc:mongodb+srv:") + ); + } + + // Connect to MongoDB database + // 连接MongoDB数据库 + @Override + public Connection connect(String url, Properties info) + throws SQLException + { + if (!acceptsURL(url)) { + return null; + } + + return new MongoConnection(url, info); + } + + // Get driver's major version + // 获取驱动主版本号 + @Override + public int getMajorVersion() + { + return 1; + } + + // Get driver's minor version + // 获取驱动次版本号 + @Override + public int getMinorVersion() + { + return 0; + } + + // Get driver's property info + // 获取驱动属性信息 + @Override + public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) + throws SQLException + { + return new DriverPropertyInfo[0]; + } + + // Check if driver is JDBC compliant + // 检查驱动是否符合JDBC规范 + @Override + public boolean 
jdbcCompliant() + { + return false; + } + + // Get parent logger + // 获取父日志记录器 + @Override + public Logger getParentLogger() + throws SQLFeatureNotSupportedException + { + throw new SQLFeatureNotSupportedException("Parent logger is not supported"); + } +} diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoResultSet.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoResultSet.java new file mode 100644 index 0000000000..0bfca00853 --- /dev/null +++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoResultSet.java @@ -0,0 +1,1348 @@ +package io.edurt.datacap.driver; + +import com.mongodb.client.AggregateIterable; +import com.mongodb.client.MongoCursor; +import org.bson.Document; + +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.nio.charset.Charset; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.SQLXML; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.List; +import java.util.Map; + +public class MongoResultSet + implements ResultSet +{ + private final MongoCursor cursor; + private final List columnNames; + private Document current; + private boolean isClosed = false; + private ResultSetMetaData metadata; + + // Constructor + // 构造函数 + public MongoResultSet(AggregateIterable result) + { + this.cursor = result.iterator(); + this.columnNames = new ArrayList<>(); + this.current = null; + this.metadata = null; + + // 预处理第一个文档以获取列名 + // Preprocess the first document to get the column names + if (cursor.hasNext()) { + Document first = result.first(); + if (first != null) { + columnNames.addAll(first.keySet()); + this.metadata = new MongoResultSetMetaData(columnNames, first); + this.current = first; + } + } + } + + // Move to next row + // 移动到下一行 + @Override + public boolean next() + throws SQLException + { + checkClosed(); + + if (cursor.hasNext()) { + current = cursor.next(); + return true; + } + current = null; + return false; + } + + // Get string value by column name + // 通过列名获取字符串值 + @Override + public String getString(String columnLabel) + throws SQLException + { + checkClosed(); + + if (current == null) { + throw new SQLException("No current row"); + } + Object value = current.get(columnLabel); + return value == null ? 
null : value.toString(); + } + + @Override + public boolean getBoolean(String columnLabel) + throws SQLException + { + checkClosed(); + + return current.getBoolean(columnLabel); + } + + @Override + public byte getByte(String columnLabel) + throws SQLException + { + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + throw new SQLException("Null value"); + } + + return String.valueOf(value).getBytes(Charset.defaultCharset())[0]; + } + + @Override + public short getShort(String columnLabel) + throws SQLException + { + return 0; + } + + // Get integer value by column name + // 通过列名获取整数值 + @Override + public int getInt(String columnLabel) + throws SQLException + { + checkClosed(); + + return current.getInteger(columnLabel); + } + + @Override + public long getLong(String columnLabel) + throws SQLException + { + checkClosed(); + + return current.getLong(columnLabel); + } + + @Override + public float getFloat(String columnLabel) + throws SQLException + { + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + throw new SQLException("Null value"); + } + if (value instanceof Number) { + return ((Number) value).floatValue(); + } + throw new SQLException("Invalid type for float column"); + } + + @Override + public double getDouble(String columnLabel) + throws SQLException + { + checkClosed(); + + return current.getDouble(columnLabel); + } + + @Override + public BigDecimal getBigDecimal(String columnLabel, int scale) + throws SQLException + { + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + throw new SQLException("Null value"); + } + if (value instanceof Number) { + return new BigDecimal(value.toString()); + } + throw new SQLException("Invalid type for BigDecimal column"); + } + + @Override + public byte[] getBytes(String columnLabel) + throws SQLException + { + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + throw new SQLException("Null value"); + } + if (value instanceof byte[]) { + return (byte[]) value; + } + return new byte[0]; + } + + @Override + public Date getDate(String columnLabel) + throws SQLException + { + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + throw new SQLException("Null value"); + } + if (value instanceof Date) { + return Date.valueOf(String.valueOf(value)); + } + throw new SQLException("Invalid type for date column"); + } + + @Override + public Time getTime(String columnLabel) + throws SQLException + { + checkClosed(); + + Object value = current.get(columnLabel); + if (value == null) { + throw new SQLException("Null value"); + } + if (value instanceof Time) { + return Time.valueOf(String.valueOf(value)); + } + throw new SQLException("Invalid type for time column"); + } + + @Override + public Timestamp getTimestamp(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public InputStream getAsciiStream(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public InputStream getUnicodeStream(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public InputStream getBinaryStream(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public SQLWarning getWarnings() + throws SQLException + { + return null; + } + + @Override + public void clearWarnings() + throws SQLException + {} + + @Override + public String getCursorName() + throws SQLException + { + return ""; + } + + @Override + public 
ResultSetMetaData getMetaData() + throws SQLException + { + checkClosed(); + + return metadata; + } + + @Override + public Object getObject(int columnIndex) + throws SQLException + { + checkClosed(); + + return current.get(columnIndex); + } + + @Override + public Object getObject(String columnLabel) + throws SQLException + { + checkClosed(); + + return current.get(columnLabel); + } + + @Override + public int findColumn(String columnLabel) + throws SQLException + { + return 0; + } + + @Override + public Reader getCharacterStream(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public Reader getCharacterStream(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public BigDecimal getBigDecimal(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public BigDecimal getBigDecimal(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public boolean isBeforeFirst() + throws SQLException + { + return false; + } + + @Override + public boolean isAfterLast() + throws SQLException + { + return false; + } + + @Override + public boolean isFirst() + throws SQLException + { + return false; + } + + @Override + public boolean isLast() + throws SQLException + { + return false; + } + + @Override + public void beforeFirst() + throws SQLException + {} + + @Override + public void afterLast() + throws SQLException + {} + + @Override + public boolean first() + throws SQLException + { + return false; + } + + @Override + public boolean last() + throws SQLException + { + return false; + } + + @Override + public int getRow() + throws SQLException + { + return 0; + } + + @Override + public boolean absolute(int row) + throws SQLException + { + return false; + } + + @Override + public boolean relative(int rows) + throws SQLException + { + return false; + } + + @Override + public boolean previous() + throws SQLException + { + return false; + } + + @Override + public void setFetchDirection(int direction) + throws SQLException + {} + + @Override + public int getFetchDirection() + throws SQLException + { + return 0; + } + + @Override + public void setFetchSize(int rows) + throws SQLException + {} + + @Override + public int getFetchSize() + throws SQLException + { + return 0; + } + + @Override + public int getType() + throws SQLException + { + return 0; + } + + @Override + public int getConcurrency() + throws SQLException + { + return 0; + } + + @Override + public boolean rowUpdated() + throws SQLException + { + return false; + } + + @Override + public boolean rowInserted() + throws SQLException + { + return false; + } + + @Override + public boolean rowDeleted() + throws SQLException + { + return false; + } + + @Override + public void updateNull(int columnIndex) + throws SQLException + {} + + @Override + public void updateBoolean(int columnIndex, boolean x) + throws SQLException + {} + + @Override + public void updateByte(int columnIndex, byte x) + throws SQLException + {} + + @Override + public void updateShort(int columnIndex, short x) + throws SQLException + {} + + @Override + public void updateInt(int columnIndex, int x) + throws SQLException + {} + + @Override + public void updateLong(int columnIndex, long x) + throws SQLException + {} + + @Override + public void updateFloat(int columnIndex, float x) + throws SQLException + {} + + @Override + public void updateDouble(int columnIndex, double x) + throws SQLException + {} + + @Override + public void updateBigDecimal(int columnIndex, BigDecimal x) + throws 
SQLException + {} + + @Override + public void updateString(int columnIndex, String x) + throws SQLException + {} + + @Override + public void updateBytes(int columnIndex, byte[] x) + throws SQLException + {} + + @Override + public void updateDate(int columnIndex, Date x) + throws SQLException + {} + + @Override + public void updateTime(int columnIndex, Time x) + throws SQLException + {} + + @Override + public void updateTimestamp(int columnIndex, Timestamp x) + throws SQLException + {} + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, int length) + throws SQLException + {} + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, int length) + throws SQLException + {} + + @Override + public void updateCharacterStream(int columnIndex, Reader x, int length) + throws SQLException + {} + + @Override + public void updateObject(int columnIndex, Object x, int scaleOrLength) + throws SQLException + {} + + @Override + public void updateObject(int columnIndex, Object x) + throws SQLException + {} + + @Override + public void updateNull(String columnLabel) + throws SQLException + {} + + @Override + public void updateBoolean(String columnLabel, boolean x) + throws SQLException + {} + + @Override + public void updateByte(String columnLabel, byte x) + throws SQLException + {} + + @Override + public void updateShort(String columnLabel, short x) + throws SQLException + {} + + @Override + public void updateInt(String columnLabel, int x) + throws SQLException + {} + + @Override + public void updateLong(String columnLabel, long x) + throws SQLException + {} + + @Override + public void updateFloat(String columnLabel, float x) + throws SQLException + {} + + @Override + public void updateDouble(String columnLabel, double x) + throws SQLException + {} + + @Override + public void updateBigDecimal(String columnLabel, BigDecimal x) + throws SQLException + {} + + @Override + public void updateString(String columnLabel, String x) + throws SQLException + {} + + @Override + public void updateBytes(String columnLabel, byte[] x) + throws SQLException + {} + + @Override + public void updateDate(String columnLabel, Date x) + throws SQLException + {} + + @Override + public void updateTime(String columnLabel, Time x) + throws SQLException + {} + + @Override + public void updateTimestamp(String columnLabel, Timestamp x) + throws SQLException + {} + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, int length) + throws SQLException + {} + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, int length) + throws SQLException + {} + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, int length) + throws SQLException + {} + + @Override + public void updateObject(String columnLabel, Object x, int scaleOrLength) + throws SQLException + {} + + @Override + public void updateObject(String columnLabel, Object x) + throws SQLException + {} + + @Override + public void insertRow() + throws SQLException + {} + + @Override + public void updateRow() + throws SQLException + {} + + @Override + public void deleteRow() + throws SQLException + {} + + @Override + public void refreshRow() + throws SQLException + {} + + @Override + public void cancelRowUpdates() + throws SQLException + {} + + @Override + public void moveToInsertRow() + throws SQLException + {} + + @Override + public void moveToCurrentRow() + throws SQLException + {} + + @Override + public Statement getStatement() + throws SQLException + { + return 
null; + } + + @Override + public Object getObject(int columnIndex, Map<String, Class<?>> map) + throws SQLException + { + return null; + } + + @Override + public Ref getRef(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public Blob getBlob(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public Clob getClob(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public Array getArray(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public Object getObject(String columnLabel, Map<String, Class<?>> map) + throws SQLException + { + return null; + } + + @Override + public Ref getRef(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public Blob getBlob(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public Clob getClob(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public Array getArray(String columnLabel) + throws SQLException + { + checkClosed(); + + // BSON arrays are decoded as java.util.List and cannot be cast to java.sql.Array + throw new SQLException("MongoDB array values are not supported as java.sql.Array"); + } + + @Override + public Date getDate(int columnIndex, Calendar cal) + throws SQLException + { + return null; + } + + @Override + public Date getDate(String columnLabel, Calendar cal) + throws SQLException + { + return null; + } + + @Override + public Time getTime(int columnIndex, Calendar cal) + throws SQLException + { + return null; + } + + @Override + public Time getTime(String columnLabel, Calendar cal) + throws SQLException + { + return null; + } + + @Override + public Timestamp getTimestamp(int columnIndex, Calendar cal) + throws SQLException + { + return null; + } + + @Override + public Timestamp getTimestamp(String columnLabel, Calendar cal) + throws SQLException + { + return null; + } + + @Override + public URL getURL(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public URL getURL(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public void updateRef(int columnIndex, Ref x) + throws SQLException + {} + + @Override + public void updateRef(String columnLabel, Ref x) + throws SQLException + {} + + @Override + public void updateBlob(int columnIndex, Blob x) + throws SQLException + {} + + @Override + public void updateBlob(String columnLabel, Blob x) + throws SQLException + {} + + @Override + public void updateClob(int columnIndex, Clob x) + throws SQLException + {} + + @Override + public void updateClob(String columnLabel, Clob x) + throws SQLException + {} + + @Override + public void updateArray(int columnIndex, Array x) + throws SQLException + {} + + @Override + public void updateArray(String columnLabel, Array x) + throws SQLException + {} + + @Override + public RowId getRowId(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public RowId getRowId(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public void updateRowId(int columnIndex, RowId x) + throws SQLException + {} + + @Override + public void updateRowId(String columnLabel, RowId x) + throws SQLException + {} + + @Override + public int getHoldability() + throws SQLException + { + return 0; + } + + @Override + public boolean isClosed() + throws SQLException + { + return isClosed; + } + + @Override + public void updateNString(int columnIndex, String nString) + throws SQLException + {} + + @Override + public void updateNString(String columnLabel, String nString) + throws SQLException + {} + + @Override + public void updateNClob(int 
columnIndex, NClob nClob) + throws SQLException + {} + + @Override + public void updateNClob(String columnLabel, NClob nClob) + throws SQLException + {} + + @Override + public NClob getNClob(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public NClob getNClob(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public SQLXML getSQLXML(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public SQLXML getSQLXML(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public void updateSQLXML(int columnIndex, SQLXML xmlObject) + throws SQLException + {} + + @Override + public void updateSQLXML(String columnLabel, SQLXML xmlObject) + throws SQLException + {} + + @Override + public String getNString(int columnIndex) + throws SQLException + { + return ""; + } + + @Override + public String getNString(String columnLabel) + throws SQLException + { + return ""; + } + + @Override + public Reader getNCharacterStream(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public Reader getNCharacterStream(String columnLabel) + throws SQLException + { + return null; + } + + @Override + public void updateNCharacterStream(int columnIndex, Reader x, long length) + throws SQLException + {} + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader, long length) + throws SQLException + {} + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, long length) + throws SQLException + {} + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, long length) + throws SQLException + {} + + @Override + public void updateCharacterStream(int columnIndex, Reader x, long length) + throws SQLException + {} + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, long length) + throws SQLException + {} + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, long length) + throws SQLException + {} + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, long length) + throws SQLException + {} + + @Override + public void updateBlob(int columnIndex, InputStream inputStream, long length) + throws SQLException + {} + + @Override + public void updateBlob(String columnLabel, InputStream inputStream, long length) + throws SQLException + {} + + @Override + public void updateClob(int columnIndex, Reader reader, long length) + throws SQLException + {} + + @Override + public void updateClob(String columnLabel, Reader reader, long length) + throws SQLException + {} + + @Override + public void updateNClob(int columnIndex, Reader reader, long length) + throws SQLException + {} + + @Override + public void updateNClob(String columnLabel, Reader reader, long length) + throws SQLException + {} + + @Override + public void updateNCharacterStream(int columnIndex, Reader x) + throws SQLException + {} + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader) + throws SQLException + {} + + @Override + public void updateAsciiStream(int columnIndex, InputStream x) + throws SQLException + {} + + @Override + public void updateBinaryStream(int columnIndex, InputStream x) + throws SQLException + {} + + @Override + public void updateCharacterStream(int columnIndex, Reader x) + throws SQLException + {} + + @Override + public void updateAsciiStream(String columnLabel, InputStream x) + throws SQLException + {} + + @Override + public void 
updateBinaryStream(String columnLabel, InputStream x) + throws SQLException + {} + + @Override + public void updateCharacterStream(String columnLabel, Reader reader) + throws SQLException + {} + + @Override + public void updateBlob(int columnIndex, InputStream inputStream) + throws SQLException + {} + + @Override + public void updateBlob(String columnLabel, InputStream inputStream) + throws SQLException + {} + + @Override + public void updateClob(int columnIndex, Reader reader) + throws SQLException + {} + + @Override + public void updateClob(String columnLabel, Reader reader) + throws SQLException + {} + + @Override + public void updateNClob(int columnIndex, Reader reader) + throws SQLException + {} + + @Override + public void updateNClob(String columnLabel, Reader reader) + throws SQLException + {} + + @Override + public <T> T getObject(int columnIndex, Class<T> type) + throws SQLException + { + return null; + } + + @Override + public <T> T getObject(String columnLabel, Class<T> type) + throws SQLException + { + return null; + } + + // Check if result set is closed + // 检查结果集是否已关闭 + private void checkClosed() + throws SQLException + { + if (isClosed) { + throw new SQLException("ResultSet is closed"); + } + } + + // Close the result set + // 关闭结果集 + @Override + public void close() + throws SQLException + { + if (!isClosed) { + cursor.close(); + isClosed = true; + } + } + + @Override + public boolean wasNull() + throws SQLException + { + return false; + } + + @Override + public String getString(int columnIndex) + throws SQLException + { + checkClosed(); + + String columnName = getColumnName(columnIndex); + Object value = current.get(columnName); + if (value == null) { + return null; + } + + return value.toString(); + } + + @Override + public boolean getBoolean(int columnIndex) + throws SQLException + { + checkClosed(); + + return current.getBoolean(getColumnName(columnIndex)); + } + + @Override + public byte getByte(int columnIndex) + throws SQLException + { + return 0; + } + + @Override + public short getShort(int columnIndex) + throws SQLException + { + return 0; + } + + @Override + public int getInt(int columnIndex) + throws SQLException + { + checkClosed(); + + return current.getInteger(getColumnName(columnIndex)); + } + + @Override + public long getLong(int columnIndex) + throws SQLException + { + checkClosed(); + + return current.getLong(getColumnName(columnIndex)); + } + + @Override + public float getFloat(int columnIndex) + throws SQLException + { + checkClosed(); + + Object value = current.get(getColumnName(columnIndex)); + if (value == null) { + return 0; + } + if (value instanceof Number) { + return ((Number) value).floatValue(); + } + + throw new SQLException("Invalid type for float column"); + } + + @Override + public double getDouble(int columnIndex) + throws SQLException + { + checkClosed(); + + return current.getDouble(getColumnName(columnIndex)); + } + + @Override + public BigDecimal getBigDecimal(int columnIndex, int scale) + throws SQLException + { + return null; + } + + @Override + public byte[] getBytes(int columnIndex) + throws SQLException + { + return new byte[0]; + } + + @Override + public Date getDate(int columnIndex) + throws SQLException + { + checkClosed(); + + Object value = current.get(getColumnName(columnIndex)); + if (value == null) { + return null; + } + // BSON dates are decoded as java.util.Date; Date.valueOf would reject their toString() form + if (value instanceof java.util.Date) { + return new Date(((java.util.Date) value).getTime()); + } + + return Date.valueOf(value.toString()); + } + + @Override + public Time getTime(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public Timestamp getTimestamp(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public 
InputStream getAsciiStream(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public InputStream getUnicodeStream(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public InputStream getBinaryStream(int columnIndex) + throws SQLException + { + return null; + } + + @Override + public T unwrap(Class iface) + throws SQLException + { + return null; + } + + @Override + public boolean isWrapperFor(Class iface) + throws SQLException + { + return false; + } + + private String getColumnName(int columnIndex) + { + if (columnIndex < 1 || columnIndex > columnNames.size()) { + throw new IllegalArgumentException("Invalid column index: " + columnIndex); + } + return columnNames.get(columnIndex - 1); + } +} diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoResultSetMetaData.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoResultSetMetaData.java new file mode 100644 index 0000000000..298f116cab --- /dev/null +++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoResultSetMetaData.java @@ -0,0 +1,237 @@ +package io.edurt.datacap.driver; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import org.bson.Document; + +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Types; +import java.util.List; +import java.util.Map; + +@SuppressFBWarnings(value = {"NP_NONNULL_RETURN_VIOLATION", "EI_EXPOSE_REP2"}) +public class MongoResultSetMetaData + implements ResultSetMetaData +{ + private final List columnNames; + private final Map sampleRow; + + // Constructor + // 构造函数 + public MongoResultSetMetaData(List columnNames, Document sampleRow) + { + this.columnNames = columnNames; + this.sampleRow = sampleRow; + } + + // Get number of columns + // 获取列数 + @Override + public int getColumnCount() + throws SQLException + { + return columnNames.size(); + } + + // Get column name + // 获取列名 + @Override + public String getColumnName(int column) + throws SQLException + { + checkColumnIndex(column); + return columnNames.get(column - 1); + } + + // Get column label + // 获取列标签 + @Override + public String getColumnLabel(int column) + throws SQLException + { + return getColumnName(column); + } + + // Get column type + // 获取列类型 + @Override + public int getColumnType(int column) + throws SQLException + { + checkColumnIndex(column); + + String columnName = columnNames.get(column - 1); + Object value = sampleRow.get(columnName); + return MongoTypeHelper.getJdbcType(value); + } + + // Get column type name + // 获取列类型名称 + @Override + public String getColumnTypeName(int column) + throws SQLException + { + checkColumnIndex(column); + + String columnName = columnNames.get(column - 1); + Object value = sampleRow.get(columnName); + return MongoTypeHelper.getTypeName(value); + } + + // Get column class name + // 获取列的Java类名 + @Override + public String getColumnClassName(int column) + throws SQLException + { + checkColumnIndex(column); + + String columnName = columnNames.get(column - 1); + Object value = sampleRow.get(columnName); + return MongoTypeHelper.getJavaClassName(value); + } + + // Check if column is nullable + // 检查列是否可为空 + @Override + public int isNullable(int column) + throws SQLException + { + return columnNullable; + } + + // Check if column is auto increment + // 检查列是否自动递增 + @Override + public boolean isAutoIncrement(int column) + throws SQLException + { + String columnName = getColumnName(column); + return columnName.equals("_id"); + } + + // Check 
if column is case sensitive + // 检查列是否大小写敏感 + @Override + public boolean isCaseSensitive(int column) + throws SQLException + { + return getColumnType(column) == Types.VARCHAR; + } + + // Check if column is searchable + // 检查列是否可搜索 + @Override + public boolean isSearchable(int column) + throws SQLException + { + return true; + } + + // Check if column is currency + // 检查列是否货币类型 + @Override + public boolean isCurrency(int column) + throws SQLException + { + return false; + } + + // Validate column index + // 验证列索引 + private void checkColumnIndex(int column) + throws SQLException + { + if (column < 1 || column > columnNames.size()) { + throw new SQLException("Invalid column index: " + column); + } + } + + @Override + public boolean isSigned(int column) + throws SQLException + { + int type = getColumnType(column); + return type == Types.INTEGER || type == Types.BIGINT || type == Types.DOUBLE; + } + + @Override + public int getColumnDisplaySize(int column) + throws SQLException + { + return 0; + } + + @Override + public int getPrecision(int column) + throws SQLException + { + return 0; + } + + @Override + public int getScale(int column) + throws SQLException + { + return 0; + } + + @Override + public String getTableName(int column) + throws SQLException + { + return ""; + } + + @Override + public String getSchemaName(int column) + throws SQLException + { + return ""; + } + + @Override + public String getCatalogName(int column) + throws SQLException + { + return ""; + } + + @Override + public boolean isReadOnly(int column) + throws SQLException + { + return false; + } + + @Override + public boolean isWritable(int column) + throws SQLException + { + return true; + } + + @Override + public boolean isDefinitelyWritable(int column) + throws SQLException + { + return true; + } + + @Override + public T unwrap(Class iface) + throws SQLException + { + if (iface.isAssignableFrom(getClass())) { + return iface.cast(this); + } + throw new SQLException("Cannot unwrap to " + iface.getName()); + } + + @Override + public boolean isWrapperFor(Class iface) + throws SQLException + { + return iface.isAssignableFrom(getClass()); + } +} diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoStatement.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoStatement.java new file mode 100644 index 0000000000..8c9e722860 --- /dev/null +++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoStatement.java @@ -0,0 +1,476 @@ +package io.edurt.datacap.driver; + +import com.mongodb.client.AggregateIterable; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.driver.iterable.InMemoryAggregateIterable; +import io.edurt.datacap.driver.parser.MongoParser; +import io.edurt.datacap.driver.parser.MongoShowParser; +import lombok.extern.slf4j.Slf4j; +import org.bson.Document; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.List; + +@Slf4j +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2", "NP_NULL_PARAM_DEREF"}) +public class MongoStatement + implements Statement +{ + private final MongoConnection connection; + private boolean isClosed = false; + + // Constructor + // 构造函数 + public MongoStatement(MongoConnection connection) + { + this.connection = connection; + } + + // Execute 
query and return ResultSet + // 执行查询并返回ResultSet + @Override + public ResultSet executeQuery(String sql) + throws SQLException + { + checkClosed(); + + try { + // Parse SQL to MongoDB query + MongoParser parser = MongoParser.createParser(sql); + if (parser instanceof MongoShowParser) { + return executeShowStatement((MongoShowParser) parser); + } + + Document query = parser.getQuery(); + if (query.containsKey("buildInfo")) { + Document buildInfo = connection.getDatabase() + .runCommand(new Document("buildInfo", 1)); + + Document versionDoc = new Document(); + versionDoc.put("version", buildInfo.getString("version")); + return new MongoResultSet(new InMemoryAggregateIterable(List.of(versionDoc))); + } + + String collectionName = parser.getCollection(); + log.debug("Executing query: {}", query); + + String[] dbAndTb = parser.getCollection().split("\\."); + MongoDatabase db = connection.getDatabase(); + if (dbAndTb.length > 1) { + db = connection.getClient().getDatabase(dbAndTb[0]); + collectionName = dbAndTb[1]; + } + + MongoCollection collection = db.getCollection(collectionName); + + // Execute aggregate command + @SuppressWarnings("unchecked") + List pipeline = (List) query.get("pipeline"); + AggregateIterable result = collection.aggregate(pipeline); + + return new MongoResultSet(result); + } + catch (Exception e) { + throw new SQLException("Failed to execute query", e); + } + } + + private ResultSet executeShowStatement(MongoShowParser parser) + throws SQLException + { + try { + switch (parser.getShowType()) { + case DATABASES: + return handleShowDatabases(parser); + case TABLES: + return handleShowTables(parser); + case COLUMNS: + return handleShowColumns(parser); + default: + throw new SQLException("Unsupported SHOW command type"); + } + } + catch (Exception e) { + throw new SQLException("Failed to execute SHOW command", e); + } + } + + private ResultSet handleShowDatabases(MongoShowParser parser) + { + List docs = connection.getClient().listDatabaseNames() + .map(name -> new Document("name", name)) + .into(new ArrayList<>()); + return new MongoResultSet(new InMemoryAggregateIterable(docs)); + } + + private ResultSet handleShowTables(MongoShowParser parser) + { + MongoDatabase db = parser.getDatabase() != null ? 
+ connection.getClient().getDatabase(parser.getDatabase()) : + connection.getDatabase(); + + List<Document> docs = db.listCollectionNames() + .map(name -> new Document("name", name)) + .into(new ArrayList<>()); + return new MongoResultSet(new InMemoryAggregateIterable(docs)); + } + + private ResultSet handleShowColumns(MongoShowParser parser) + { + String[] dbAndTb = parser.getCollection().split("\\."); + String database = parser.getDatabase(); + String table = parser.getCollection(); + if (database == null && dbAndTb.length == 2) { + database = dbAndTb[0]; + table = dbAndTb[1]; + } + + // Fall back to the connection's default database when none is specified + MongoDatabase db = database != null ? connection.getClient().getDatabase(database) : connection.getDatabase(); + + Document sample = db.getCollection(table) + .find() + .limit(1) + .first(); + + List<Document> docs = new ArrayList<>(); + if (sample != null) { + sample.keySet().forEach(field -> + docs.add(new Document("name", field)) + ); + } + return new MongoResultSet(new InMemoryAggregateIterable(docs)); + } + + private boolean matchesPattern(String value, String pattern) + { + if (pattern == null) { + return true; + } + return value.matches(pattern.replace("%", ".*")); + } + + private String getMongoFieldType(Object value) + { + if (value == null) { + return "null"; + } + if (value instanceof String) { + return "string"; + } + if (value instanceof Integer) { + return "int"; + } + if (value instanceof Long) { + return "long"; + } + if (value instanceof Double) { + return "double"; + } + if (value instanceof Boolean) { + return "boolean"; + } + if (value instanceof Document) { + return "document"; + } + if (value instanceof List) { + return "array"; + } + return value.getClass().getSimpleName(); + } + + // Execute update statement + // 执行更新语句 + @Override + public int executeUpdate(String sql) + throws SQLException + { + throw new SQLException("Update operation not supported"); + } + + // Check if statement is closed + // 检查语句是否已关闭 + private void checkClosed() + throws SQLException + { + if (isClosed) { + throw new SQLException("Statement is closed"); + } + } + + // Close the statement + // 关闭语句 + @Override + public void close() + throws SQLException + { + isClosed = true; + } + + @Override + public int getMaxFieldSize() + throws SQLException + { + return 0; + } + + @Override + public void setMaxFieldSize(int max) + throws SQLException + {} + + @Override + public int getMaxRows() + throws SQLException + { + return 0; + } + + @Override + public void setMaxRows(int max) + throws SQLException + {} + + @Override + public void setEscapeProcessing(boolean enable) + throws SQLException + {} + + @Override + public int getQueryTimeout() + throws SQLException + { + return 0; + } + + @Override + public void setQueryTimeout(int seconds) + throws SQLException + {} + + @Override + public void cancel() + throws SQLException + {} + + @Override + public SQLWarning getWarnings() + throws SQLException + { + return null; + } + + @Override + public void clearWarnings() + throws SQLException + {} + + @Override + public void setCursorName(String name) + throws SQLException + {} + + @Override + public boolean execute(String sql) + throws SQLException + { + return false; + } + + @Override + public ResultSet getResultSet() + throws SQLException + { + return null; + } + + @Override + public int getUpdateCount() + throws SQLException + { + return 0; + } + + @Override + public boolean getMoreResults() + throws SQLException + { + return false; + } + + @Override + public void setFetchDirection(int direction) + throws SQLException + {} + + @Override + public int getFetchDirection() + 
throws SQLException + { + return 0; + } + + @Override + public void setFetchSize(int rows) + throws SQLException + {} + + @Override + public int getFetchSize() + throws SQLException + { + return 0; + } + + @Override + public int getResultSetConcurrency() + throws SQLException + { + return 0; + } + + @Override + public int getResultSetType() + throws SQLException + { + return 0; + } + + @Override + public void addBatch(String sql) + throws SQLException + {} + + @Override + public void clearBatch() + throws SQLException + {} + + @Override + public int[] executeBatch() + throws SQLException + { + return new int[0]; + } + + @Override + public Connection getConnection() + throws SQLException + { + return null; + } + + @Override + public boolean getMoreResults(int current) + throws SQLException + { + return false; + } + + @Override + public ResultSet getGeneratedKeys() + throws SQLException + { + return null; + } + + @Override + public int executeUpdate(String sql, int autoGeneratedKeys) + throws SQLException + { + return 0; + } + + @Override + public int executeUpdate(String sql, int[] columnIndexes) + throws SQLException + { + return 0; + } + + @Override + public int executeUpdate(String sql, String[] columnNames) + throws SQLException + { + return 0; + } + + @Override + public boolean execute(String sql, int autoGeneratedKeys) + throws SQLException + { + return false; + } + + @Override + public boolean execute(String sql, int[] columnIndexes) + throws SQLException + { + return false; + } + + @Override + public boolean execute(String sql, String[] columnNames) + throws SQLException + { + return false; + } + + @Override + public int getResultSetHoldability() + throws SQLException + { + return 0; + } + + @Override + public boolean isClosed() + throws SQLException + { + return false; + } + + @Override + public void setPoolable(boolean poolable) + throws SQLException + {} + + @Override + public boolean isPoolable() + throws SQLException + { + return false; + } + + @Override + public void closeOnCompletion() + throws SQLException + {} + + @Override + public boolean isCloseOnCompletion() + throws SQLException + { + return false; + } + + @Override + public T unwrap(Class iface) + throws SQLException + { + return null; + } + + @Override + public boolean isWrapperFor(Class iface) + throws SQLException + { + return false; + } +} diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoTypeHelper.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoTypeHelper.java new file mode 100644 index 0000000000..f3ae9e90e6 --- /dev/null +++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/MongoTypeHelper.java @@ -0,0 +1,120 @@ +package io.edurt.datacap.driver; + +import java.sql.Types; +import java.util.HashMap; +import java.util.Map; + +public class MongoTypeHelper +{ + private static final Map TYPE_MAP = new HashMap<>(); + + private MongoTypeHelper() {} + + // Get MongoDataType by class name + // 通过类名获取MongoDataType + public static MongoDataType getType(String className) + { + MongoDataType type = TYPE_MAP.get(className); + return type != null ? 
type : new MongoDataType(Types.OTHER, "OTHER", Object.class); + } + + // Get MongoDataType by object + // 通过对象获取MongoDataType + public static MongoDataType getType(Object value) + { + if (value == null) { + return TYPE_MAP.get("NULL"); + } + return getType(value.getClass().getSimpleName()); + } + + // Get JDBC type by class name + // 通过类名获取JDBC类型 + public static int getJdbcType(String className) + { + return getType(className).getJdbcType(); + } + + // Get JDBC type by object + // 通过对象获取JDBC类型 + public static int getJdbcType(Object value) + { + return getType(value).getJdbcType(); + } + + // Get type name by class name + // 通过类名获取类型名称 + public static String getTypeName(String className) + { + return getType(className).getTypeName(); + } + + // Get type name by object + // 通过对象获取类型名称 + public static String getTypeName(Object value) + { + return getType(value).getTypeName(); + } + + // Get Java class name by class name + // 通过类名获取Java类名 + public static String getJavaClassName(String className) + { + return getType(className).getJavaClassName(); + } + + // Get Java class name by object + // 通过对象获取Java类名 + public static String getJavaClassName(Object value) + { + return getType(value).getJavaClassName(); + } + + // Data type container class + // 数据类型容器类 + public static class MongoDataType + { + private final int jdbcType; + private final String typeName; + private final Class javaClass; + + public MongoDataType(int jdbcType, String typeName, Class javaClass) + { + this.jdbcType = jdbcType; + this.typeName = typeName; + this.javaClass = javaClass; + } + + public int getJdbcType() + { + return jdbcType; + } + + public String getTypeName() + { + return typeName; + } + + public String getJavaClassName() + { + return javaClass.getName(); + } + } + + static { + // Initialize basic types + // 初始化基本类型 + TYPE_MAP.put("String", new MongoDataType(Types.VARCHAR, "VARCHAR", String.class)); + TYPE_MAP.put("ObjectId", new MongoDataType(Types.VARCHAR, "VARCHAR", String.class)); + TYPE_MAP.put("Integer", new MongoDataType(Types.INTEGER, "INTEGER", Integer.class)); + TYPE_MAP.put("Long", new MongoDataType(Types.BIGINT, "BIGINT", Long.class)); + TYPE_MAP.put("Double", new MongoDataType(Types.DOUBLE, "DOUBLE", Double.class)); + TYPE_MAP.put("Boolean", new MongoDataType(Types.BOOLEAN, "BOOLEAN", Boolean.class)); + TYPE_MAP.put("Date", new MongoDataType(Types.TIMESTAMP, "TIMESTAMP", java.sql.Timestamp.class)); + TYPE_MAP.put("ArrayList", new MongoDataType(Types.ARRAY, "ARRAY", java.util.ArrayList.class)); + TYPE_MAP.put("Document", new MongoDataType(Types.OTHER, "OBJECT", Object.class)); + TYPE_MAP.put("Binary", new MongoDataType(Types.BINARY, "BINARY", byte[].class)); + TYPE_MAP.put("Decimal128", new MongoDataType(Types.DECIMAL, "DECIMAL", java.math.BigDecimal.class)); + TYPE_MAP.put("NULL", new MongoDataType(Types.NULL, "NULL", Object.class)); + } +} diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/iterable/InMemoryAggregateIterable.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/iterable/InMemoryAggregateIterable.java new file mode 100644 index 0000000000..b2bc2d98b5 --- /dev/null +++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/iterable/InMemoryAggregateIterable.java @@ -0,0 +1,225 @@ +package io.edurt.datacap.driver.iterable; + +import com.mongodb.ExplainVerbosity; +import com.mongodb.Function; +import com.mongodb.ServerAddress; +import com.mongodb.ServerCursor; +import com.mongodb.client.AggregateIterable; +import 
com.mongodb.client.MongoCursor; +import com.mongodb.client.MongoIterable; +import com.mongodb.client.model.Collation; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import org.bson.BsonValue; +import org.bson.Document; +import org.bson.conversions.Bson; + +import java.util.Collection; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + +@SuppressFBWarnings(value = {"NP_NONNULL_RETURN_VIOLATION", "EI_EXPOSE_REP2"}) +public class InMemoryAggregateIterable + implements AggregateIterable +{ + private final List documents; + + public InMemoryAggregateIterable(List documents) + { + this.documents = documents; + } + + @Override + public MongoCursor iterator() + { + return new InMemoryMongoCursor(documents); + } + + @Override + public MongoCursor cursor() + { + return null; + } + + // Interface implementations with minimal implementation + @Override + public AggregateIterable batchSize(int size) + { + return this; + } + + @Override + public void toCollection() + {} + + @Override + public AggregateIterable allowDiskUse(Boolean allowDiskUse) + { + return this; + } + + @Override + public AggregateIterable maxTime(long maxTime, TimeUnit timeUnit) + { + return this; + } + + @Override + public AggregateIterable maxAwaitTime(long maxAwaitTime, TimeUnit timeUnit) + { + return this; + } + + @Override + public AggregateIterable bypassDocumentValidation(Boolean bypassDocumentValidation) + { + return this; + } + + @Override + public AggregateIterable collation(Collation collation) + { + return this; + } + + @Override + public AggregateIterable comment(String comment) + { + return this; + } + + @Override + public AggregateIterable comment(BsonValue bsonValue) + { + return null; + } + + @Override + public AggregateIterable hint(Bson hint) + { + return this; + } + + @Override + public AggregateIterable hintString(String s) + { + return null; + } + + @Override + public AggregateIterable let(Bson bson) + { + return null; + } + + @Override + public Document explain() + { + return null; + } + + @Override + public Document explain(ExplainVerbosity explainVerbosity) + { + return null; + } + + @Override + public E explain(Class aClass) + { + return null; + } + + @Override + public E explain(Class aClass, ExplainVerbosity explainVerbosity) + { + return null; + } + + @Override + public void forEach(Consumer action) + { + MongoCursor cursor = iterator(); + while (cursor.hasNext()) { + action.accept(cursor.next()); + } + } + + @Override + public > A into(A target) + { + forEach(target::add); + return target; + } + + @Override + public Document first() + { + MongoCursor cursor = iterator(); + return cursor.hasNext() ? cursor.next() : null; + } + + @Override + public MongoIterable map(Function mapper) + { + throw new UnsupportedOperationException("Map operation not supported"); + } + + private static class InMemoryMongoCursor + implements MongoCursor + { + private final List results; + private int position = 0; + + public InMemoryMongoCursor(List results) + { + this.results = results; + } + + @Override + public void close() + { + // No resources to close + } + + @Override + public boolean hasNext() + { + return position < results.size(); + } + + @Override + public Document next() + { + if (!hasNext()) { + throw new NoSuchElementException(); + } + return results.get(position++); + } + + @Override + public int available() + { + return 0; + } + + @Override + public Document tryNext() + { + return hasNext() ? 
next() : null; + } + + @Override + public ServerCursor getServerCursor() + { + return null; + } + + @Override + public ServerAddress getServerAddress() + { + return null; + } + } +} diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/iterable/InMemoryFindIterable.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/iterable/InMemoryFindIterable.java new file mode 100644 index 0000000000..4d8742be40 --- /dev/null +++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/iterable/InMemoryFindIterable.java @@ -0,0 +1,276 @@ +package io.edurt.datacap.driver.iterable; + +import com.mongodb.CursorType; +import com.mongodb.ExplainVerbosity; +import com.mongodb.Function; +import com.mongodb.ServerAddress; +import com.mongodb.ServerCursor; +import com.mongodb.client.FindIterable; +import com.mongodb.client.MongoCursor; +import com.mongodb.client.MongoIterable; +import com.mongodb.client.model.Collation; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import org.bson.BsonValue; +import org.bson.Document; +import org.bson.conversions.Bson; + +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; + +@SuppressFBWarnings(value = {"NP_NONNULL_RETURN_VIOLATION", "EI_EXPOSE_REP2"}) +public class InMemoryFindIterable + implements FindIterable +{ + private final List documents; + + public InMemoryFindIterable(List documents) + { + this.documents = documents; + } + + @Override + public MongoCursor iterator() + { + return new InMemoryMongoCursor(documents); + } + + @Override + public MongoCursor cursor() + { + return null; + } + + @Override + public Document first() + { + return documents.isEmpty() ? null : documents.get(0); + } + + @Override + public MongoIterable map(Function function) + { + return null; + } + + @Override + public > A into(A objects) + { + return null; + } + + @Override + public FindIterable filter(Bson filter) + { + return this; + } + + @Override + public FindIterable limit(int limit) + { + return this; + } + + @Override + public FindIterable skip(int skip) + { + return this; + } + + @Override + public FindIterable maxTime(long maxTime, TimeUnit timeUnit) + { + return this; + } + + @Override + public FindIterable maxAwaitTime(long l, TimeUnit timeUnit) + { + return null; + } + + @Override + public FindIterable projection(Bson bson) + { + return null; + } + + @Override + public FindIterable sort(Bson bson) + { + return null; + } + + @Override + public FindIterable noCursorTimeout(boolean b) + { + return null; + } + + @Override + public FindIterable oplogReplay(boolean b) + { + return null; + } + + @Override + public FindIterable partial(boolean b) + { + return null; + } + + @Override + public FindIterable cursorType(CursorType cursorType) + { + return null; + } + + @Override + public FindIterable batchSize(int batchSize) + { + return this; + } + + @Override + public FindIterable collation(Collation collation) + { + return null; + } + + @Override + public FindIterable comment(String s) + { + return null; + } + + @Override + public FindIterable comment(BsonValue bsonValue) + { + return null; + } + + @Override + public FindIterable hint(Bson bson) + { + return null; + } + + @Override + public FindIterable hintString(String s) + { + return null; + } + + @Override + public FindIterable let(Bson bson) + { + return null; + } + + @Override + public FindIterable max(Bson bson) + { + return null; + } + + @Override + public FindIterable min(Bson bson) + { + return null; + } + + 
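+ // NOTE: chainable FindIterable stubs that return null here (sort, projection, hint, collation, min, max, ...) will break fluent call chains such as find().sort(...).limit(...); returning `this`, as filter/limit/skip/batchSize do above, keeps chained usage safe.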
@Override + public FindIterable returnKey(boolean b) + { + return null; + } + + @Override + public FindIterable showRecordId(boolean b) + { + return null; + } + + @Override + public FindIterable allowDiskUse(Boolean aBoolean) + { + return null; + } + + @Override + public Document explain() + { + return null; + } + + @Override + public Document explain(ExplainVerbosity explainVerbosity) + { + return null; + } + + @Override + public E explain(Class aClass) + { + return null; + } + + @Override + public E explain(Class aClass, ExplainVerbosity explainVerbosity) + { + return null; + } + + private static class InMemoryMongoCursor + implements MongoCursor + { + private final List documents; + private int position = 0; + + public InMemoryMongoCursor(List documents) + { + this.documents = documents; + } + + @Override + public void close() + { + } + + @Override + public boolean hasNext() + { + return position < documents.size(); + } + + @Override + public Document next() + { + return documents.get(position++); + } + + @Override + public int available() + { + return 0; + } + + @Override + public Document tryNext() + { + return hasNext() ? next() : null; + } + + @Override + public ServerCursor getServerCursor() + { + return null; + } + + @Override + public ServerAddress getServerAddress() + { + return null; + } + } +} diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoParser.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoParser.java new file mode 100644 index 0000000000..dc49e970ac --- /dev/null +++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoParser.java @@ -0,0 +1,45 @@ +package io.edurt.datacap.driver.parser; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.SQLParser; +import io.edurt.datacap.sql.statement.SQLStatement; +import io.edurt.datacap.sql.statement.SelectStatement; +import io.edurt.datacap.sql.statement.ShowStatement; +import lombok.Getter; +import lombok.Setter; +import org.bson.Document; + +import java.util.List; + +@Getter +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) +public class MongoParser +{ + protected Document filter; + protected List fields; + protected String command; + protected Document query; + protected String collection; + protected ShowStatement.ShowType showType; + + @Setter + protected String database; + + // Parse SQL statement + // 解析SQL语句 + public static MongoParser createParser(String sql) + { + if (sql == null || sql.trim().isEmpty()) { + throw new IllegalArgumentException("SQL query cannot be null or empty"); + } + + SQLStatement statement = SQLParser.parse(sql.trim()); + if (statement instanceof SelectStatement) { + return new MongoSelectParser((SelectStatement) statement); + } + else if (statement instanceof ShowStatement) { + return new MongoShowParser((ShowStatement) statement); + } + throw new IllegalArgumentException("Unsupported SQL operation: " + sql); + } +} diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoSelectParser.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoSelectParser.java new file mode 100644 index 0000000000..6349a4daa7 --- /dev/null +++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoSelectParser.java @@ -0,0 +1,368 @@ +package io.edurt.datacap.driver.parser; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import 
io.edurt.datacap.sql.node.Expression; +import io.edurt.datacap.sql.node.clause.LimitClause; +import io.edurt.datacap.sql.node.element.OrderByElement; +import io.edurt.datacap.sql.node.element.SelectElement; +import io.edurt.datacap.sql.node.element.TableElement; +import io.edurt.datacap.sql.statement.SelectStatement; +import lombok.Getter; +import org.bson.Document; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; + +@Getter +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2", "CT_CONSTRUCTOR_THROW"}) +public class MongoSelectParser + extends MongoParser +{ + private final Map fieldAliasMap = new HashMap<>(); + private final Map aliasToFieldMap = new HashMap<>(); + + public MongoSelectParser(SelectStatement statement) + { + parseSelectStatement(statement); + } + + // Parse SELECT statement + // 解析SELECT语句 + public void parseSelectStatement(SelectStatement select) + { + List elements = select.getSelectElements(); + if (elements != null && elements.size() == 1) { + SelectElement element = elements.get(0); + if (element.getExpression() != null && + element.getExpression().getType() == Expression.ExpressionType.FUNCTION && + "VERSION".equalsIgnoreCase(element.getExpression().getValue().toString())) { + this.query = new Document("buildInfo", 1); + return; + } + } + + // Get collection name first + parseFromClause(select.getFromSources()); + + // Parse select elements to set fields + parseSelectElements(select.getSelectElements()); + + // Initialize an aggregation pipeline + List pipeline = new ArrayList<>(); + + // Add $match stage for WHERE conditions + if (select.getWhereClause() != null) { + Object queryResult = parseExpression(select.getWhereClause()); + Document matchStage = new Document("$match", + queryResult instanceof Document ? queryResult : new Document("$eq", queryResult)); + pipeline.add(matchStage); + } + + // Add $project stage for field selection + // Add $group stage if GROUP BY exists + if (select.getGroupByElements() != null && !select.getGroupByElements().isEmpty()) { + Document groupStage = parseGroupByClause(select.getGroupByElements(), select.getSelectElements()); + pipeline.add(new Document("$group", groupStage)); + } + // If no GROUP BY, add normal $project stage + else if (fields != null && !fields.isEmpty() && + !(fields.size() == 1 && fields.get(0).equals("*"))) { + Document projectStage = new Document(); + projectStage.put("_id", 0); + + // Create field mappings in $project stage + for (SelectElement element : select.getSelectElements()) { + String originalField = element.getColumn() != null ? 
+ element.getColumn() : + element.getExpression().getValue().toString(); + + String alias = element.getAlias(); + if (alias != null) { + projectStage.put(alias, "$" + originalField); + } + else { + projectStage.put(originalField, 1); + } + } + pipeline.add(new Document("$project", projectStage)); + } + + // Add $sort stage if ORDER BY exists + if (select.getOrderByElements() != null && !select.getOrderByElements().isEmpty()) { + Document sortStage = new Document("$sort", parseOrderByElements(select.getOrderByElements())); + pipeline.add(sortStage); + } + + // Add $skip and $limit stages if present + LimitClause limitClause = select.getLimitClause(); + if (limitClause != null) { + if (limitClause.getOffset() > 0) { + pipeline.add(new Document("$skip", (int) limitClause.getOffset())); + } + if (limitClause.getLimit() >= 0) { + pipeline.add(new Document("$limit", (int) limitClause.getLimit())); + } + } + + // Set the final query + this.query = new Document("aggregate", this.collection) + .append("pipeline", pipeline) + .append("cursor", new Document()); + } + + // Parse SELECT elements to field list + // 解析SELECT元素到字段列表 + private void parseSelectElements(List elements) + { + this.fields = new ArrayList<>(); + if (elements != null) { + for (SelectElement element : elements) { + String field; + // Get field name (from column name or expression) + if (element.getColumn() != null) { + field = element.getColumn(); + } + else if (element.getExpression() != null) { + Expression expr = element.getExpression(); + if (expr.getType() == Expression.ExpressionType.FUNCTION && + "VERSION".equalsIgnoreCase(expr.getValue().toString())) { + field = "version"; + } + else { + field = parseExpression(expr).toString(); + } + } + else { + continue; + } + + // Handle alias mapping + if (element.getAlias() != null) { + fieldAliasMap.put(field, element.getAlias()); + aliasToFieldMap.put(element.getAlias(), field); + fields.add(element.getAlias()); + } + else { + fields.add(field); + } + } + } + } + + // Parse FROM clause to get collection name + // 解析FROM子句获取集合名称 + private void parseFromClause(List fromSources) + { + if (fromSources != null && !fromSources.isEmpty()) { + TableElement mainTable = fromSources.get(0); + this.collection = mainTable.getTableName(); + + // MongoDB doesn't support JOINs + // MongoDB不支持JOIN操作 + if (mainTable.getJoins() != null && !mainTable.getJoins().isEmpty()) { + throw new IllegalArgumentException("MongoDB does not support JOIN operations"); + } + } + } + + private Object parseExpression(Expression expr) + { + if (expr == null) { + return null; + } + + switch (expr.getType()) { + case LITERAL: + return parseValue(expr.getValue().toString()); + + case COLUMN_REFERENCE: + return expr.getValue().toString(); + + case BINARY_OP: + String operator = expr.getValue().toString(); + List children = expr.getChildren(); + + // Handle logical operators (AND, OR) + if ("AND".equalsIgnoreCase(operator) || "OR".equalsIgnoreCase(operator)) { + List conditions = new ArrayList<>(); + for (Expression child : children) { + Object result = parseExpression(child); + if (result instanceof Document) { + conditions.add((Document) result); + } + } + return new Document(operator.equalsIgnoreCase("AND") ? 
"$and" : "$or", conditions); + } + + // Handle comparison operators + if (children != null && children.size() == 2) { + Expression left = children.get(0); + Expression right = children.get(1); + + String field = parseExpression(left).toString(); + Object value = parseExpression(right); + + Document condition = new Document(); + switch (operator) { + case "=": + condition.put(field, value); + break; + case ">": + condition.put(field, new Document("$gt", value)); + break; + case "<": + condition.put(field, new Document("$lt", value)); + break; + case ">=": + condition.put(field, new Document("$gte", value)); + break; + case "<=": + condition.put(field, new Document("$lte", value)); + break; + case "!=": + condition.put(field, new Document("$ne", value)); + break; + case "LIKE": + String pattern = value.toString().replace("%", ".*"); + condition.put(field, Pattern.compile(pattern, Pattern.CASE_INSENSITIVE)); + break; + case "IN": + condition.put(field, new Document("$in", value)); + break; + default: + throw new IllegalArgumentException("Unsupported operator: " + operator); + } + return condition; + } + + throw new IllegalArgumentException("Invalid binary expression structure"); + + case FUNCTION: + if ("VERSION".equalsIgnoreCase(expr.getValue().toString())) { + return new Document("$buildInfo", 1); + } + + throw new IllegalArgumentException("Unsupported function: " + expr.getValue()); + + default: + throw new IllegalArgumentException("Unsupported expression type: " + expr.getType()); + } + } + + // Parse ORDER BY elements to MongoDB sort document + // 解析ORDER BY元素到MongoDB排序文档 + private Document parseOrderByElements(List elements) + { + Document orderBy = new Document(); + for (OrderByElement element : elements) { + String field = element.getExpression().getValue().toString(); + orderBy.put(field, element.isAscending() ? 1 : -1); + } + return orderBy; + } + + // Parse string value to appropriate type + // 将字符串值解析为适当的类型 + private Object parseValue(String value) + { + value = value.trim(); + + // Remove quotes if present + // 如果有引号则移除 + if (value.startsWith("'") && value.endsWith("'")) { + return value.substring(1, value.length() - 1); + } + + // Try parsing as number + // 尝试解析为数字 + try { + if (value.contains(".")) { + return Double.parseDouble(value); + } + else { + return Long.parseLong(value); + } + } + catch (NumberFormatException e) { + // Return as string if not a number + // 如果不是数字则返回字符串 + return value; + } + } + + private Document parseGroupByClause(List groupByColumns, List selectElements) + { + Document groupStage = new Document(); + + // Handle _id field for grouping + if (groupByColumns.size() == 1 && groupByColumns.get(0).getValue().equals("_id")) { + groupStage.put("_id", "$" + groupByColumns.get(0).getValue()); + } + else { + // Multiple group by columns + Document idDoc = new Document(); + for (Expression expr : groupByColumns) { + String field = expr.getValue().toString(); + idDoc.put(field, "$" + field); + } + groupStage.put("_id", idDoc); + } + + // Handle aggregation functions in SELECT clause + for (SelectElement element : selectElements) { + if (element.getExpression() != null) { + Expression expr = element.getExpression(); + if (expr.getType() == Expression.ExpressionType.FUNCTION) { + String functionName = expr.getValue().toString().toUpperCase(); + String field = expr.getChildren().get(0).getValue().toString(); + String alias = element.getAlias() != null ? 
element.getAlias() : functionName + "_" + field; + + switch (functionName) { + case "COUNT": + groupStage.put(alias, new Document("$sum", 1)); + break; + case "SUM": + groupStage.put(alias, new Document("$sum", "$" + field)); + break; + case "AVG": + groupStage.put(alias, new Document("$avg", "$" + field)); + break; + case "MIN": + groupStage.put(alias, new Document("$min", "$" + field)); + break; + case "MAX": + groupStage.put(alias, new Document("$max", "$" + field)); + break; + default: + throw new IllegalArgumentException("Unsupported aggregation function: " + functionName); + } + } + else { + // Handle non-aggregated fields that are part of GROUP BY + String field = expr.getValue().toString(); + if (isFieldInGroupBy(field, groupByColumns)) { + groupStage.put(field, new Document("$first", "$" + field)); + } + } + } + else if (element.getColumn() != null) { + // Handle simple columns that are part of GROUP BY + String field = element.getColumn(); + if (isFieldInGroupBy(field, groupByColumns)) { + groupStage.put(field, new Document("$first", "$" + field)); + } + } + } + + return groupStage; + } + + private boolean isFieldInGroupBy(String field, List groupByColumns) + { + return groupByColumns.stream() + .anyMatch(expr -> expr.getValue().toString().equals(field)); + } +} diff --git a/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoShowParser.java b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoShowParser.java new file mode 100644 index 0000000000..1619464ca4 --- /dev/null +++ b/driver/datacap-driver-mongodb/src/main/java/io/edurt/datacap/driver/parser/MongoShowParser.java @@ -0,0 +1,76 @@ +package io.edurt.datacap.driver.parser; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.statement.ShowStatement; +import lombok.Getter; +import org.bson.Document; + +@Getter +@SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2", "CT_CONSTRUCTOR_THROW"}) +public class MongoShowParser + extends MongoParser +{ + public MongoShowParser(ShowStatement statement) + { + parseShowStatement(statement); + } + + public void parseShowStatement(ShowStatement show) + { + this.showType = show.getShowType(); + switch (show.getShowType()) { + case DATABASES: + this.command = "listDatabases"; + if (show.getPattern() != null) { + // Convert SQL LIKE pattern to MongoDB regex pattern + String pattern = convertLikeToRegex(show.getPattern()); + this.filter = new Document("name", new Document("$regex", pattern)); + } + break; + + case TABLES: + this.command = "listCollections"; + if (show.getDatabaseName() != null) { + this.database = show.getDatabaseName(); + } + if (show.getPattern() != null) { + String pattern = convertLikeToRegex(show.getPattern()); + this.filter = new Document("name", new Document("$regex", pattern)); + } + break; + + case COLUMNS: + this.command = "listFields"; + if (show.getDatabaseName() != null) { + this.database = show.getDatabaseName(); + } + if (show.getTableName() != null) { + this.collection = show.getTableName(); + } + if (show.getPattern() != null) { + String pattern = convertLikeToRegex(show.getPattern()); + this.filter = new Document("name", new Document("$regex", pattern)); + } + break; + + default: + throw new IllegalArgumentException("Unsupported SHOW type: " + show.getShowType()); + } + } + + private String convertLikeToRegex(String likePattern) + { + // Remove quotes if present + if (likePattern.startsWith("'") && likePattern.endsWith("'")) { + likePattern = 
likePattern.substring(1, likePattern.length() - 1); + } + else if (likePattern.startsWith("\"") && likePattern.endsWith("\"")) { + likePattern = likePattern.substring(1, likePattern.length() - 1); + } + + // Convert SQL LIKE pattern to MongoDB regex pattern + return likePattern + .replace("%", ".*") // % matches any sequence of characters + .replace("_", "."); // _ matches any single character + } +} diff --git a/driver/datacap-driver-mongodb/src/main/resources/META-INF/services/java.sql.Driver b/driver/datacap-driver-mongodb/src/main/resources/META-INF/services/java.sql.Driver new file mode 100644 index 0000000000..c2e0372e5b --- /dev/null +++ b/driver/datacap-driver-mongodb/src/main/resources/META-INF/services/java.sql.Driver @@ -0,0 +1,2 @@ +io.edurt.datacap.driver.MongoJdbcDriver +com.dbschema.MongoJdbcDriver diff --git a/driver/datacap-driver-redis/pom.xml b/driver/datacap-driver-redis/pom.xml index e9e28557c8..097cbe64f6 100644 --- a/driver/datacap-driver-redis/pom.xml +++ b/driver/datacap-driver-redis/pom.xml @@ -6,7 +6,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/executor/datacap-executor-local/pom.xml b/executor/datacap-executor-local/pom.xml index 10abf20930..84b455dbfb 100644 --- a/executor/datacap-executor-local/pom.xml +++ b/executor/datacap-executor-local/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/executor/datacap-executor-seatunnel/pom.xml b/executor/datacap-executor-seatunnel/pom.xml index bd433e705b..c4c01bf981 100644 --- a/executor/datacap-executor-seatunnel/pom.xml +++ b/executor/datacap-executor-seatunnel/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/executor/datacap-executor-spi/pom.xml b/executor/datacap-executor-spi/pom.xml index d8828f1e96..0edbf1a85b 100644 --- a/executor/datacap-executor-spi/pom.xml +++ b/executor/datacap-executor-spi/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/fs/datacap-fs-alioss/pom.xml b/fs/datacap-fs-alioss/pom.xml index 767f518da2..1d50110e00 100644 --- a/fs/datacap-fs-alioss/pom.xml +++ b/fs/datacap-fs-alioss/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/fs/datacap-fs-amazon-s3/pom.xml b/fs/datacap-fs-amazon-s3/pom.xml index d58400015d..b4feb1356f 100644 --- a/fs/datacap-fs-amazon-s3/pom.xml +++ b/fs/datacap-fs-amazon-s3/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/fs/datacap-fs-local/pom.xml b/fs/datacap-fs-local/pom.xml index 3e849f9c07..aa1ba41fe3 100644 --- a/fs/datacap-fs-local/pom.xml +++ b/fs/datacap-fs-local/pom.xml @@ -3,7 +3,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/fs/datacap-fs-minio/pom.xml b/fs/datacap-fs-minio/pom.xml index ebfef8fec9..42b9dc5dbd 100644 --- a/fs/datacap-fs-minio/pom.xml +++ b/fs/datacap-fs-minio/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/fs/datacap-fs-qiniu/pom.xml b/fs/datacap-fs-qiniu/pom.xml index a8c98f3a22..c9416e484d 100644 --- a/fs/datacap-fs-qiniu/pom.xml +++ b/fs/datacap-fs-qiniu/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/fs/datacap-fs-spi/pom.xml b/fs/datacap-fs-spi/pom.xml index af5984d2a1..a5850c1ca7 100644 --- a/fs/datacap-fs-spi/pom.xml +++ b/fs/datacap-fs-spi/pom.xml @@ 
-3,7 +3,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/fs/datacap-fs-tencent-cos/pom.xml b/fs/datacap-fs-tencent-cos/pom.xml index 7d26ca6e48..fbc58e624d 100644 --- a/fs/datacap-fs-tencent-cos/pom.xml +++ b/fs/datacap-fs-tencent-cos/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/lib/datacap-http/pom.xml b/lib/datacap-http/pom.xml index 73e745f396..4d2664a4c1 100644 --- a/lib/datacap-http/pom.xml +++ b/lib/datacap-http/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/lib/datacap-logger/pom.xml b/lib/datacap-logger/pom.xml index 595ec05d35..871f589fd0 100644 --- a/lib/datacap-logger/pom.xml +++ b/lib/datacap-logger/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/lib/datacap-schedule/pom.xml b/lib/datacap-schedule/pom.xml index 9f0f500e71..d6d577078f 100644 --- a/lib/datacap-schedule/pom.xml +++ b/lib/datacap-schedule/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/lib/datacap-shell/pom.xml b/lib/datacap-shell/pom.xml index 3bb03912bf..4700590da1 100644 --- a/lib/datacap-shell/pom.xml +++ b/lib/datacap-shell/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/notify/datacap-notify-dingtalk/pom.xml b/notify/datacap-notify-dingtalk/pom.xml index 1e7f5c743f..b13836e147 100644 --- a/notify/datacap-notify-dingtalk/pom.xml +++ b/notify/datacap-notify-dingtalk/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/notify/datacap-notify-spi/pom.xml b/notify/datacap-notify-spi/pom.xml index a1205e2b49..b217779a68 100644 --- a/notify/datacap-notify-spi/pom.xml +++ b/notify/datacap-notify-spi/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/parser/datacap-parser-mysql/pom.xml b/parser/datacap-parser-mysql/pom.xml index 55c4d038c0..fe1eb9c24a 100644 --- a/parser/datacap-parser-mysql/pom.xml +++ b/parser/datacap-parser-mysql/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/parser/datacap-parser-spi/pom.xml b/parser/datacap-parser-spi/pom.xml index e6db254e90..22729ab176 100644 --- a/parser/datacap-parser-spi/pom.xml +++ b/parser/datacap-parser-spi/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/parser/datacap-parser-trino/pom.xml b/parser/datacap-parser-trino/pom.xml index 12a596c0aa..8b2f43afc3 100644 --- a/parser/datacap-parser-trino/pom.xml +++ b/parser/datacap-parser-trino/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-alioss/pom.xml b/plugin/datacap-plugin-alioss/pom.xml index 0adcf56993..f83665a5c5 100644 --- a/plugin/datacap-plugin-alioss/pom.xml +++ b/plugin/datacap-plugin-alioss/pom.xml @@ -6,7 +6,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-alioss/src/main/java/io/edurt/datacap/plugin/natived/alioss/AliossAdapter.java b/plugin/datacap-plugin-alioss/src/main/java/io/edurt/datacap/plugin/natived/alioss/AliossAdapter.java index f60df371e0..42739f71a4 100644 --- a/plugin/datacap-plugin-alioss/src/main/java/io/edurt/datacap/plugin/natived/alioss/AliossAdapter.java +++ 
b/plugin/datacap-plugin-alioss/src/main/java/io/edurt/datacap/plugin/natived/alioss/AliossAdapter.java @@ -2,20 +2,20 @@ import com.aliyun.oss.OSS; import com.aliyun.oss.model.ObjectListing; -import com.google.common.base.Preconditions; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import io.edurt.datacap.spi.adapter.NativeAdapter; import io.edurt.datacap.spi.model.Configure; import io.edurt.datacap.spi.model.Response; import io.edurt.datacap.spi.model.Time; -import io.edurt.datacap.sql.SqlBase; +import io.edurt.datacap.sql.node.element.SelectElement; +import io.edurt.datacap.sql.statement.SelectStatement; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.ObjectUtils; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.List; +import java.util.stream.Collectors; @Slf4j @SuppressFBWarnings(value = {"RCN_REDUNDANT_NULLCHECK_WOULD_HAVE_BEEN_A_NPE", "REC_CATCH_EXCEPTION"}, @@ -45,24 +45,22 @@ public Response handlerExecute(String content) List types = new ArrayList<>(); List columns = new ArrayList<>(); try { - SqlBase sqlBase = this.parser.getSqlBase(); - if (sqlBase.isSuccessful()) { - OSS client = this.aliossConnection.getOssClient(); - if (ObjectUtils.isNotEmpty(this.parser.getSqlBase().getColumns())) { - headers.addAll(this.parser.getSqlBase().getColumns()); - } - else { - headers.add("*"); - } - types.add("String"); - ObjectListing objectListing = client.listObjects(configure.getDatabase().get(), sqlBase.getTable()); - objectListing.getObjectSummaries() - .forEach(column -> columns.add(Collections.singletonList(column.getKey()))); - response.setIsSuccessful(Boolean.TRUE); + SelectStatement statement = (SelectStatement) this.parser.getStatement(); + OSS client = this.aliossConnection.getOssClient(); + if (!statement.getSelectElements().isEmpty()) { + headers.addAll(statement.getSelectElements() + .stream() + .map(SelectElement::getColumn) + .collect(Collectors.toList())); } else { - Preconditions.checkArgument(true, sqlBase.getMessage()); + headers.add("*"); } + types.add("String"); + ObjectListing objectListing = client.listObjects(configure.getDatabase().get(), statement.getFromSources().get(0).getTableName()); + objectListing.getObjectSummaries() + .forEach(column -> columns.add(Collections.singletonList(column.getKey()))); + response.setIsSuccessful(Boolean.TRUE); } catch (Exception ex) { log.error("Execute content failed content {} exception ", content, ex); diff --git a/plugin/datacap-plugin-alioss/src/main/java/io/edurt/datacap/plugin/natived/alioss/AliossParser.java b/plugin/datacap-plugin-alioss/src/main/java/io/edurt/datacap/plugin/natived/alioss/AliossParser.java index 4c610c869a..38ce72eac2 100644 --- a/plugin/datacap-plugin-alioss/src/main/java/io/edurt/datacap/plugin/natived/alioss/AliossParser.java +++ b/plugin/datacap-plugin-alioss/src/main/java/io/edurt/datacap/plugin/natived/alioss/AliossParser.java @@ -1,7 +1,9 @@ package io.edurt.datacap.plugin.natived.alioss; import io.edurt.datacap.spi.parser.SqlParser; -import io.edurt.datacap.sql.SqlBase; +import io.edurt.datacap.sql.statement.SQLStatement; +import io.edurt.datacap.sql.statement.SelectStatement; +import io.edurt.datacap.sql.statement.ShowStatement; public class AliossParser extends SqlParser @@ -14,13 +16,18 @@ public AliossParser(String content) @Override public String getExecuteContext() { - SqlBase sqlBase = this.getSqlBase(); - if (sqlBase.getToken().equalsIgnoreCase("SHOW")) { - return sqlBase.getTable(); + SQLStatement statement = 
this.getStatement(); + + if (statement instanceof SelectStatement) { + SelectStatement selectStatement = (SelectStatement) statement; + return selectStatement.getFromSources().get(0).getTableName(); + } + else if (statement instanceof ShowStatement) { + ShowStatement showStatement = (ShowStatement) statement; + return showStatement.getTableName(); } - else if (sqlBase.getToken().equalsIgnoreCase("SELECT")) { - return sqlBase.getTable(); + else { + throw new RuntimeException("Unsupported statement: " + statement); } - return null; } } diff --git a/plugin/datacap-plugin-cassandra/pom.xml b/plugin/datacap-plugin-cassandra/pom.xml index 468c67fe8f..8cced68c98 100644 --- a/plugin/datacap-plugin-cassandra/pom.xml +++ b/plugin/datacap-plugin-cassandra/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-ceresdb/pom.xml b/plugin/datacap-plugin-ceresdb/pom.xml index 7b99b18a0d..864c225083 100644 --- a/plugin/datacap-plugin-ceresdb/pom.xml +++ b/plugin/datacap-plugin-ceresdb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-clickhouse-http/pom.xml b/plugin/datacap-plugin-clickhouse-http/pom.xml index c9505de1e6..5e87dd178d 100644 --- a/plugin/datacap-plugin-clickhouse-http/pom.xml +++ b/plugin/datacap-plugin-clickhouse-http/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-clickhouse/pom.xml b/plugin/datacap-plugin-clickhouse/pom.xml index 4f84a62f15..5067526b6b 100644 --- a/plugin/datacap-plugin-clickhouse/pom.xml +++ b/plugin/datacap-plugin-clickhouse/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-cratedb-http/pom.xml b/plugin/datacap-plugin-cratedb-http/pom.xml index c3b4f4d5ad..04e34e76e1 100644 --- a/plugin/datacap-plugin-cratedb-http/pom.xml +++ b/plugin/datacap-plugin-cratedb-http/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-cratedb/pom.xml b/plugin/datacap-plugin-cratedb/pom.xml index 894badd3fb..459987da22 100644 --- a/plugin/datacap-plugin-cratedb/pom.xml +++ b/plugin/datacap-plugin-cratedb/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-db2/pom.xml b/plugin/datacap-plugin-db2/pom.xml index b224311f30..ff30bf9ff2 100644 --- a/plugin/datacap-plugin-db2/pom.xml +++ b/plugin/datacap-plugin-db2/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-dm/pom.xml b/plugin/datacap-plugin-dm/pom.xml index c192ce0da7..c86270d709 100644 --- a/plugin/datacap-plugin-dm/pom.xml +++ b/plugin/datacap-plugin-dm/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-doris/pom.xml b/plugin/datacap-plugin-doris/pom.xml index 2e5ed95404..a7dee5a386 100644 --- a/plugin/datacap-plugin-doris/pom.xml +++ b/plugin/datacap-plugin-doris/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-dremio/pom.xml b/plugin/datacap-plugin-dremio/pom.xml index ef7d674424..af1334e579 100644 --- a/plugin/datacap-plugin-dremio/pom.xml +++ b/plugin/datacap-plugin-dremio/pom.xml @@ -5,7 +5,7 @@ datacap 
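
The same SqlBase-to-SQLStatement migration recurs in the Kafka and Zookeeper parsers further below: an instanceof dispatch resolves the target table, and the select list (falling back to "*") supplies the result headers. A condensed sketch of that shared shape, assuming the io.edurt.datacap.sql types shown in these hunks (the wrapper class and helper names are illustrative):

    import io.edurt.datacap.sql.node.element.SelectElement;
    import io.edurt.datacap.sql.statement.SQLStatement;
    import io.edurt.datacap.sql.statement.SelectStatement;
    import io.edurt.datacap.sql.statement.ShowStatement;

    import java.util.Collections;
    import java.util.List;
    import java.util.stream.Collectors;

    public class StatementDispatchSketch
    {
        // Resolve the table/topic/path a statement targets.
        static String resolveTarget(SQLStatement statement)
        {
            if (statement instanceof SelectStatement) {
                return ((SelectStatement) statement).getFromSources().get(0).getTableName();
            }
            else if (statement instanceof ShowStatement) {
                return ((ShowStatement) statement).getTableName();
            }
            throw new RuntimeException("Unsupported statement: " + statement);
        }

        // Headers come from the select list, falling back to "*".
        static List<String> resolveHeaders(SelectStatement statement)
        {
            if (statement.getSelectElements().isEmpty()) {
                return Collections.singletonList("*");
            }
            return statement.getSelectElements()
                    .stream()
                    .map(SelectElement::getColumn)
                    .collect(Collectors.toList());
        }
    }
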
io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-druid/pom.xml b/plugin/datacap-plugin-druid/pom.xml index 867ca17150..2af8f7dd9e 100644 --- a/plugin/datacap-plugin-druid/pom.xml +++ b/plugin/datacap-plugin-druid/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-duckdb/pom.xml b/plugin/datacap-plugin-duckdb/pom.xml index 914ccba408..9683265dbf 100644 --- a/plugin/datacap-plugin-duckdb/pom.xml +++ b/plugin/datacap-plugin-duckdb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-elasticsearch-8x/pom.xml b/plugin/datacap-plugin-elasticsearch-8x/pom.xml index 56406395ce..17afd8aaf1 100644 --- a/plugin/datacap-plugin-elasticsearch-8x/pom.xml +++ b/plugin/datacap-plugin-elasticsearch-8x/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-greptimedb/pom.xml b/plugin/datacap-plugin-greptimedb/pom.xml index bdb9093a41..60daf15072 100644 --- a/plugin/datacap-plugin-greptimedb/pom.xml +++ b/plugin/datacap-plugin-greptimedb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-h2-tcp/pom.xml b/plugin/datacap-plugin-h2-tcp/pom.xml index b45536abad..793ce29360 100644 --- a/plugin/datacap-plugin-h2-tcp/pom.xml +++ b/plugin/datacap-plugin-h2-tcp/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-h2/pom.xml b/plugin/datacap-plugin-h2/pom.xml index 84817dae4e..837c4d8d6c 100644 --- a/plugin/datacap-plugin-h2/pom.xml +++ b/plugin/datacap-plugin-h2/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-hdfs/pom.xml b/plugin/datacap-plugin-hdfs/pom.xml index 0485e80f36..e2416b1803 100644 --- a/plugin/datacap-plugin-hdfs/pom.xml +++ b/plugin/datacap-plugin-hdfs/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-hdfs/src/main/kotlin/io/edurt/datacap/plugin/HdfsAdapter.kt b/plugin/datacap-plugin-hdfs/src/main/kotlin/io/edurt/datacap/plugin/HdfsAdapter.kt index e527a703e0..e4b692a2db 100644 --- a/plugin/datacap-plugin-hdfs/src/main/kotlin/io/edurt/datacap/plugin/HdfsAdapter.kt +++ b/plugin/datacap-plugin-hdfs/src/main/kotlin/io/edurt/datacap/plugin/HdfsAdapter.kt @@ -6,9 +6,9 @@ import io.edurt.datacap.spi.model.Configure import io.edurt.datacap.spi.model.Response import io.edurt.datacap.spi.model.Time import io.edurt.datacap.spi.parser.SqlParser -import io.edurt.datacap.sql.SqlBase -import io.edurt.datacap.sql.SqlBaseToken -import org.apache.commons.lang3.ObjectUtils +import io.edurt.datacap.sql.statement.SQLStatement +import io.edurt.datacap.sql.statement.SelectStatement +import io.edurt.datacap.sql.statement.ShowStatement import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.FileSystem import org.apache.hadoop.fs.Path @@ -20,9 +20,8 @@ import kotlin.Any import kotlin.Exception import kotlin.String import kotlin.require -import kotlin.requireNotNull -@SuppressFBWarnings(value = ["BC_BAD_CAST_TO_ABSTRACT_COLLECTION", "EI_EXPOSE_REP2"]) +@SuppressFBWarnings(value = ["BC_BAD_CAST_TO_ABSTRACT_COLLECTION", "EI_EXPOSE_REP2", "BC_IMPOSSIBLE_CAST"]) class HdfsAdapter : NativeAdapter { private val log: 
Logger = getLogger(HdfsAdapter::class.java) @@ -47,37 +46,19 @@ class HdfsAdapter : NativeAdapter val columns: MutableList = ArrayList() try { - val sqlBase = parser.sqlBase - if (sqlBase.isSuccessful) + val configuration = this.hdfsConnection?.hdfsConfigure + val statement: SelectStatement = this.parser.statement as SelectStatement + if (statement.selectElements.isNotEmpty()) { - val configuration = this.hdfsConnection?.hdfsConfigure - val sqlBase = this.parser.sqlBase - if (sqlBase.isSuccessful) - { - if (ObjectUtils.isNotEmpty(parser.sqlBase.columns)) - { - headers.addAll(parser.sqlBase.columns) - } - else - { - headers.add("*") - } - types.add("String") - this.adapter(configuration, sqlBase) - .forEach { column -> columns.add(Collections.singletonList(column)) } - response.isSuccessful = Boolean.TRUE - } - else - { - response.isSuccessful = false - response.message = sqlBase.message - } + headers.addAll(statement.selectElements.map { it.column }) } else { - response.isSuccessful = Boolean.FALSE - response.message = sqlBase.message + headers.add("*") } + types.add("String") + this.adapter(configuration, parser.statement).forEach { column -> columns.add(Collections.singletonList(column)) } + response.isSuccessful = Boolean.TRUE } catch (ex: Exception) { @@ -97,19 +78,19 @@ class HdfsAdapter : NativeAdapter return response } - private fun adapter(configuration: Configuration?, info: SqlBase): List + private fun adapter(configuration: Configuration?, statement: SQLStatement): List { - requireNotNull(info.token) { "Token must not be null" } - require(info.token.equals(SqlBaseToken.SHOW.name, ignoreCase = true)) { "Token not supported" } + require(statement is SelectStatement) { "Not supported select statement" } val fileSystem = FileSystem.get(configuration) + val showStatement = statement as ShowStatement - if (info.childToken.equals("DATABASES", ignoreCase = true)) + if (showStatement.showType == ShowStatement.ShowType.DATABASES) { - info.table = "/" + showStatement.tableName = "/" } - return fileSystem.listStatus(Path("/" + info.table)) + return fileSystem.listStatus(Path("/" + showStatement.tableName)) .map { it.path.name } .toList() } diff --git a/plugin/datacap-plugin-hive-2x/pom.xml b/plugin/datacap-plugin-hive-2x/pom.xml index e4303ece19..6623239af8 100644 --- a/plugin/datacap-plugin-hive-2x/pom.xml +++ b/plugin/datacap-plugin-hive-2x/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-hologres/pom.xml b/plugin/datacap-plugin-hologres/pom.xml index 875cd3e7d8..f108e81099 100644 --- a/plugin/datacap-plugin-hologres/pom.xml +++ b/plugin/datacap-plugin-hologres/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-ignite/pom.xml b/plugin/datacap-plugin-ignite/pom.xml index 74837d24f6..44eca88f43 100644 --- a/plugin/datacap-plugin-ignite/pom.xml +++ b/plugin/datacap-plugin-ignite/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-impala/pom.xml b/plugin/datacap-plugin-impala/pom.xml index 3e61ff1bec..4c542ef1d0 100644 --- a/plugin/datacap-plugin-impala/pom.xml +++ b/plugin/datacap-plugin-impala/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-influxdb/pom.xml b/plugin/datacap-plugin-influxdb/pom.xml index 83ffdec5ba..47e0eb8118 100644 --- 
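
The HdfsAdapter hunk above guards with require(statement is SelectStatement) and then casts the same value to ShowStatement, which is why the BC_IMPOSSIBLE_CAST suppression is added alongside it. A sketch of the listing behaviour the ShowStatement branch appears to intend, written in Java for consistency with the other adapters; the wrapper class and the boolean flag standing in for ShowStatement.ShowType are hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class HdfsShowSketch
    {
        // SHOW DATABASES lists the root; SHOW TABLES <name> lists that path.
        static List<String> listPaths(Configuration configuration, String tableName, boolean showDatabases)
                throws IOException
        {
            FileSystem fileSystem = FileSystem.get(configuration);
            String target = showDatabases ? "/" : "/" + tableName;
            return Arrays.stream(fileSystem.listStatus(new Path(target)))
                    .map(status -> status.getPath().getName())
                    .collect(Collectors.toList());
        }
    }
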
a/plugin/datacap-plugin-influxdb/pom.xml +++ b/plugin/datacap-plugin-influxdb/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-iotdb/pom.xml b/plugin/datacap-plugin-iotdb/pom.xml index 6d9f780e91..b7da932847 100644 --- a/plugin/datacap-plugin-iotdb/pom.xml +++ b/plugin/datacap-plugin-iotdb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-kafka/pom.xml b/plugin/datacap-plugin-kafka/pom.xml index f77b6398f4..289d398265 100644 --- a/plugin/datacap-plugin-kafka/pom.xml +++ b/plugin/datacap-plugin-kafka/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-kafka/src/main/java/io/edurt/datacap/plugin/natived/kafka/KafkaAdapter.java b/plugin/datacap-plugin-kafka/src/main/java/io/edurt/datacap/plugin/natived/kafka/KafkaAdapter.java index b12996b128..1d42e5a76e 100644 --- a/plugin/datacap-plugin-kafka/src/main/java/io/edurt/datacap/plugin/natived/kafka/KafkaAdapter.java +++ b/plugin/datacap-plugin-kafka/src/main/java/io/edurt/datacap/plugin/natived/kafka/KafkaAdapter.java @@ -6,10 +6,11 @@ import io.edurt.datacap.spi.model.Configure; import io.edurt.datacap.spi.model.Response; import io.edurt.datacap.spi.model.Time; -import io.edurt.datacap.sql.SqlBase; -import io.edurt.datacap.sql.SqlBaseToken; +import io.edurt.datacap.sql.node.element.SelectElement; +import io.edurt.datacap.sql.statement.SQLStatement; +import io.edurt.datacap.sql.statement.SelectStatement; +import io.edurt.datacap.sql.statement.ShowStatement; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.ObjectUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.kafka.clients.admin.AdminClient; @@ -20,6 +21,7 @@ import java.util.Collections; import java.util.Date; import java.util.List; +import java.util.stream.Collectors; @Slf4j @SuppressFBWarnings(value = {"RCN_REDUNDANT_NULLCHECK_WOULD_HAVE_BEEN_A_NPE", "REC_CATCH_EXCEPTION"}, @@ -49,24 +51,20 @@ public Response handlerExecute(String content) List types = new ArrayList<>(); List columns = new ArrayList<>(); try { - SqlBase sqlBase = this.parser.getSqlBase(); - if (sqlBase.isSuccessful()) { - AdminClient client = this.kafkaConnection.getClient(); - if (ObjectUtils.isNotEmpty(this.parser.getSqlBase().getColumns())) { - headers.addAll(this.parser.getSqlBase().getColumns()); - } - else { - headers.add("*"); - } - types.add("String"); - this.adapter(client, sqlBase) - .forEach(column -> columns.add(Collections.singletonList(column))); - response.setIsSuccessful(Boolean.TRUE); + SelectStatement statement = (SelectStatement) this.parser.getStatement(); + AdminClient client = this.kafkaConnection.getClient(); + if (!statement.getSelectElements().isEmpty()) { + headers.addAll(statement.getSelectElements() + .stream() + .map(SelectElement::getColumn) + .collect(Collectors.toList())); } else { - response.setIsSuccessful(Boolean.FALSE); - response.setMessage(sqlBase.getMessage()); + headers.add("*"); } + types.add("String"); + this.adapter(client, parser.getStatement()).forEach(column -> columns.add(Collections.singletonList(column))); + response.setIsSuccessful(Boolean.TRUE); } catch (Exception ex) { log.error("Execute content failed content {} exception ", content, ex); @@ -84,16 +82,16 @@ public Response handlerExecute(String content) return response; } - private List 
adapter(AdminClient client, SqlBase info) + private List adapter(AdminClient client, SQLStatement statement) { List array = new ArrayList<>(); - if (info.getToken().equalsIgnoreCase(SqlBaseToken.SHOW.name())) { - if (info.getChildToken().equalsIgnoreCase(SqlBaseToken.TOPICS.name()) - || info.getChildToken().equalsIgnoreCase("DATABASES")) { + if (statement instanceof ShowStatement) { + ShowStatement info = (ShowStatement) statement; + + if (info.getShowType() == ShowStatement.ShowType.DATABASES) { this.adapterShowTopics(client, array); } - else if (info.getChildToken().equalsIgnoreCase(SqlBaseToken.CONSUMERS.name()) - || info.getChildToken().equalsIgnoreCase("TABLES")) { + else if (info.getShowType() == ShowStatement.ShowType.TABLES) { this.adapterShowConsumers(client, info, array); } } @@ -113,10 +111,10 @@ private void adapterShowTopics(AdminClient client, List array) } } - private void adapterShowConsumers(AdminClient client, SqlBase info, List array) + private void adapterShowConsumers(AdminClient client, ShowStatement info, List array) { try { - if (StringUtils.isNotEmpty(info.getTable())) { + if (StringUtils.isNotEmpty(info.getTableName())) { client.listConsumerGroups() .all() .get() @@ -127,7 +125,7 @@ private void adapterShowConsumers(AdminClient client, SqlBase info, List ConsumerGroupDescription consumerGroupDescription = describeConsumerGroupsResult.all().get().get(v.groupId()); if (consumerGroupDescription.members().stream().anyMatch(member -> member.assignment().topicPartitions().stream().anyMatch(tp -> - tp.topic().equals(info.getTable().replace("`", ""))))) { + tp.topic().equals(info.getTableName().replace("`", ""))))) { array.add(v.groupId()); } } diff --git a/plugin/datacap-plugin-kafka/src/main/java/io/edurt/datacap/plugin/natived/kafka/KafkaParser.java b/plugin/datacap-plugin-kafka/src/main/java/io/edurt/datacap/plugin/natived/kafka/KafkaParser.java index e9bd6b5952..d93b77ad0d 100644 --- a/plugin/datacap-plugin-kafka/src/main/java/io/edurt/datacap/plugin/natived/kafka/KafkaParser.java +++ b/plugin/datacap-plugin-kafka/src/main/java/io/edurt/datacap/plugin/natived/kafka/KafkaParser.java @@ -1,7 +1,9 @@ package io.edurt.datacap.plugin.natived.kafka; import io.edurt.datacap.spi.parser.SqlParser; -import io.edurt.datacap.sql.SqlBase; +import io.edurt.datacap.sql.statement.SQLStatement; +import io.edurt.datacap.sql.statement.SelectStatement; +import io.edurt.datacap.sql.statement.ShowStatement; public class KafkaParser extends SqlParser @@ -14,13 +16,18 @@ public KafkaParser(String content) @Override public String getExecuteContext() { - SqlBase sqlBase = this.getSqlBase(); - if (sqlBase.getToken().equalsIgnoreCase("SHOW")) { - return sqlBase.getTable(); + SQLStatement statement = this.getStatement(); + + if (statement instanceof SelectStatement) { + SelectStatement selectStatement = (SelectStatement) statement; + return selectStatement.getFromSources().get(0).getTableName(); + } + else if (statement instanceof ShowStatement) { + ShowStatement showStatement = (ShowStatement) statement; + return showStatement.getTableName(); } - else if (sqlBase.getToken().equalsIgnoreCase("SELECT")) { - return sqlBase.getTable(); + else { + throw new RuntimeException("Unsupported statement: " + statement); } - return null; } } diff --git a/plugin/datacap-plugin-kylin/pom.xml b/plugin/datacap-plugin-kylin/pom.xml index 63155cc02d..19c8f6780e 100644 --- a/plugin/datacap-plugin-kylin/pom.xml +++ b/plugin/datacap-plugin-kylin/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 
2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-kyuubi/pom.xml b/plugin/datacap-plugin-kyuubi/pom.xml index d6df748ac3..f0922453e2 100644 --- a/plugin/datacap-plugin-kyuubi/pom.xml +++ b/plugin/datacap-plugin-kyuubi/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-matrixone/pom.xml b/plugin/datacap-plugin-matrixone/pom.xml index 6f2d7a6b3c..0b1370e35d 100644 --- a/plugin/datacap-plugin-matrixone/pom.xml +++ b/plugin/datacap-plugin-matrixone/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-monetdb/pom.xml b/plugin/datacap-plugin-monetdb/pom.xml index ec81e9bcc4..3adde43cda 100644 --- a/plugin/datacap-plugin-monetdb/pom.xml +++ b/plugin/datacap-plugin-monetdb/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-mongo-atlas/pom.xml b/plugin/datacap-plugin-mongo-atlas/pom.xml index fb3e120d8a..bad6f60dc0 100644 --- a/plugin/datacap-plugin-mongo-atlas/pom.xml +++ b/plugin/datacap-plugin-mongo-atlas/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-mongo-community/pom.xml b/plugin/datacap-plugin-mongo-community/pom.xml index ad1f685404..e19f3948a6 100644 --- a/plugin/datacap-plugin-mongo-community/pom.xml +++ b/plugin/datacap-plugin-mongo-community/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml @@ -26,14 +26,9 @@ io.edurt.datacap - datacap-driver-mongo + datacap-driver-mongodb ${project.version} - - org.mongodb - mongo-java-driver - 3.12.13 - @@ -84,5 +79,4 @@ - diff --git a/plugin/datacap-plugin-mongo-community/src/main/kotlin/io/edurt/datacap/plugin/MongoCommunityService.kt b/plugin/datacap-plugin-mongo-community/src/main/kotlin/io/edurt/datacap/plugin/MongoCommunityService.kt index 9502af209c..4ba1d8646b 100644 --- a/plugin/datacap-plugin-mongo-community/src/main/kotlin/io/edurt/datacap/plugin/MongoCommunityService.kt +++ b/plugin/datacap-plugin-mongo-community/src/main/kotlin/io/edurt/datacap/plugin/MongoCommunityService.kt @@ -1,6 +1,8 @@ package io.edurt.datacap.plugin import io.edurt.datacap.spi.PluginService +import io.edurt.datacap.spi.model.Configure +import java.util.* class MongoCommunityService : PluginService { @@ -9,6 +11,23 @@ class MongoCommunityService : PluginService return "mongodb" } + override fun url(configure: Configure): String + { + val buffer = StringBuilder() + buffer.append("jdbc:") + buffer.append(configure.type) + buffer.append("://") + buffer.append(configure.host) + buffer.append(":") + buffer.append(configure.port) + + val properties = Properties() + configure.database.ifPresent { properties["database"] = it } + configure.env.ifPresent { properties.putAll(it) } + + return buffer.toString() + } + override fun driver(): String { return "com.dbschema.MongoJdbcDriver" diff --git a/plugin/datacap-plugin-mysql/pom.xml b/plugin/datacap-plugin-mysql/pom.xml index 4b96130547..14a20a1271 100644 --- a/plugin/datacap-plugin-mysql/pom.xml +++ b/plugin/datacap-plugin-mysql/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-neo4j/pom.xml b/plugin/datacap-plugin-neo4j/pom.xml index 7988bc2721..b6f053c60f 100644 --- a/plugin/datacap-plugin-neo4j/pom.xml +++ b/plugin/datacap-plugin-neo4j/pom.xml @@ -6,7 
+6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-oceanbase/pom.xml b/plugin/datacap-plugin-oceanbase/pom.xml index e3caeea5ad..0a319ecb5f 100644 --- a/plugin/datacap-plugin-oceanbase/pom.xml +++ b/plugin/datacap-plugin-oceanbase/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-oracle/pom.xml b/plugin/datacap-plugin-oracle/pom.xml index 1c96f9e1ba..4925881861 100644 --- a/plugin/datacap-plugin-oracle/pom.xml +++ b/plugin/datacap-plugin-oracle/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-paradedb/pom.xml b/plugin/datacap-plugin-paradedb/pom.xml index b0e5b82566..a5d245443c 100644 --- a/plugin/datacap-plugin-paradedb/pom.xml +++ b/plugin/datacap-plugin-paradedb/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-phoenix/pom.xml b/plugin/datacap-plugin-phoenix/pom.xml index 3320f20a5a..8e56a99481 100644 --- a/plugin/datacap-plugin-phoenix/pom.xml +++ b/plugin/datacap-plugin-phoenix/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-pinot/pom.xml b/plugin/datacap-plugin-pinot/pom.xml index c9b60322f6..66636dbb11 100644 --- a/plugin/datacap-plugin-pinot/pom.xml +++ b/plugin/datacap-plugin-pinot/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-postgresql/pom.xml b/plugin/datacap-plugin-postgresql/pom.xml index da791908d6..829a2c46dc 100644 --- a/plugin/datacap-plugin-postgresql/pom.xml +++ b/plugin/datacap-plugin-postgresql/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-presto/pom.xml b/plugin/datacap-plugin-presto/pom.xml index 3246a3ade3..9596a6e37c 100644 --- a/plugin/datacap-plugin-presto/pom.xml +++ b/plugin/datacap-plugin-presto/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-questdb/pom.xml b/plugin/datacap-plugin-questdb/pom.xml index 21295faf62..1753d32aa6 100644 --- a/plugin/datacap-plugin-questdb/pom.xml +++ b/plugin/datacap-plugin-questdb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-redis-jdbc/pom.xml b/plugin/datacap-plugin-redis-jdbc/pom.xml index 4c19e490c5..a518c4d4e5 100644 --- a/plugin/datacap-plugin-redis-jdbc/pom.xml +++ b/plugin/datacap-plugin-redis-jdbc/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-redis/pom.xml b/plugin/datacap-plugin-redis/pom.xml index 0a005e78cc..3029c9e977 100644 --- a/plugin/datacap-plugin-redis/pom.xml +++ b/plugin/datacap-plugin-redis/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-scylladb/pom.xml b/plugin/datacap-plugin-scylladb/pom.xml index 062b30bbe1..4e06ad9d46 100644 --- a/plugin/datacap-plugin-scylladb/pom.xml +++ b/plugin/datacap-plugin-scylladb/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-snowflake/pom.xml b/plugin/datacap-plugin-snowflake/pom.xml index 
d8e8cb3ba2..5f4cd44556 100644 --- a/plugin/datacap-plugin-snowflake/pom.xml +++ b/plugin/datacap-plugin-snowflake/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-solr/pom.xml b/plugin/datacap-plugin-solr/pom.xml index 9ca710b685..d6118a2cf7 100644 --- a/plugin/datacap-plugin-solr/pom.xml +++ b/plugin/datacap-plugin-solr/pom.xml @@ -5,7 +5,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-sqlserver/pom.xml b/plugin/datacap-plugin-sqlserver/pom.xml index ad43e1ef5f..5cd77d6998 100644 --- a/plugin/datacap-plugin-sqlserver/pom.xml +++ b/plugin/datacap-plugin-sqlserver/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-starrocks/pom.xml b/plugin/datacap-plugin-starrocks/pom.xml index 35f43d65b8..93eda22bd3 100644 --- a/plugin/datacap-plugin-starrocks/pom.xml +++ b/plugin/datacap-plugin-starrocks/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-tdengine/pom.xml b/plugin/datacap-plugin-tdengine/pom.xml index 812ac565a9..024246c3b9 100644 --- a/plugin/datacap-plugin-tdengine/pom.xml +++ b/plugin/datacap-plugin-tdengine/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-timescale/pom.xml b/plugin/datacap-plugin-timescale/pom.xml index 33cc351453..33caafd808 100644 --- a/plugin/datacap-plugin-timescale/pom.xml +++ b/plugin/datacap-plugin-timescale/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-trino/pom.xml b/plugin/datacap-plugin-trino/pom.xml index e1f8b7a744..80356eaa48 100644 --- a/plugin/datacap-plugin-trino/pom.xml +++ b/plugin/datacap-plugin-trino/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-ydb/pom.xml b/plugin/datacap-plugin-ydb/pom.xml index 71df5896dc..46b94b4f98 100644 --- a/plugin/datacap-plugin-ydb/pom.xml +++ b/plugin/datacap-plugin-ydb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/plugin/datacap-plugin-zookeeper/pom.xml b/plugin/datacap-plugin-zookeeper/pom.xml index 08a9d8f6f5..f5b4f5099e 100644 --- a/plugin/datacap-plugin-zookeeper/pom.xml +++ b/plugin/datacap-plugin-zookeeper/pom.xml @@ -5,7 +5,7 @@ datacap io.edurt.datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml 4.0.0 diff --git a/plugin/datacap-plugin-zookeeper/src/main/java/io/edurt/datacap/plugin/natived/zookeeper/ZookeeperAdapter.java b/plugin/datacap-plugin-zookeeper/src/main/java/io/edurt/datacap/plugin/natived/zookeeper/ZookeeperAdapter.java index 182ef15f14..00d4e18a1f 100644 --- a/plugin/datacap-plugin-zookeeper/src/main/java/io/edurt/datacap/plugin/natived/zookeeper/ZookeeperAdapter.java +++ b/plugin/datacap-plugin-zookeeper/src/main/java/io/edurt/datacap/plugin/natived/zookeeper/ZookeeperAdapter.java @@ -1,20 +1,20 @@ package io.edurt.datacap.plugin.natived.zookeeper; -import com.google.common.base.Preconditions; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import io.edurt.datacap.spi.adapter.NativeAdapter; import io.edurt.datacap.spi.model.Configure; import io.edurt.datacap.spi.model.Response; import io.edurt.datacap.spi.model.Time; -import io.edurt.datacap.sql.SqlBase; +import 
io.edurt.datacap.sql.node.element.SelectElement; +import io.edurt.datacap.sql.statement.SelectStatement; import lombok.extern.slf4j.Slf4j; import org.I0Itec.zkclient.ZkClient; -import org.apache.commons.lang3.ObjectUtils; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.List; +import java.util.stream.Collectors; @Slf4j @SuppressFBWarnings(value = {"RCN_REDUNDANT_NULLCHECK_WOULD_HAVE_BEEN_A_NPE", "REC_CATCH_EXCEPTION"}, @@ -44,23 +44,21 @@ public Response handlerExecute(String content) List types = new ArrayList<>(); List columns = new ArrayList<>(); try { - SqlBase sqlBase = this.parser.getSqlBase(); - if (sqlBase.isSuccessful()) { - ZkClient client = this.zookeeperConnection.getClient(); - if (ObjectUtils.isNotEmpty(this.parser.getSqlBase().getColumns())) { - headers.addAll(this.parser.getSqlBase().getColumns()); - } - else { - headers.add("*"); - } - types.add("String"); - client.getChildren(this.parser.getExecuteContext()) - .forEach(column -> columns.add(Collections.singletonList(column))); - response.setIsSuccessful(Boolean.TRUE); + SelectStatement statement = (SelectStatement) this.parser.getStatement(); + ZkClient client = this.zookeeperConnection.getClient(); + if (!statement.getSelectElements().isEmpty()) { + headers.addAll(statement.getSelectElements() + .stream() + .map(SelectElement::getColumn) + .collect(Collectors.toList())); } else { - Preconditions.checkArgument(true, sqlBase.getMessage()); + headers.add("*"); } + types.add("String"); + client.getChildren(this.parser.getExecuteContext()) + .forEach(column -> columns.add(Collections.singletonList(column))); + response.setIsSuccessful(Boolean.TRUE); } catch (Exception ex) { log.error("Execute content failed content {} exception ", content, ex); diff --git a/plugin/datacap-plugin-zookeeper/src/main/java/io/edurt/datacap/plugin/natived/zookeeper/ZookeeperParser.java b/plugin/datacap-plugin-zookeeper/src/main/java/io/edurt/datacap/plugin/natived/zookeeper/ZookeeperParser.java index f6bbaa0b28..515be473c7 100644 --- a/plugin/datacap-plugin-zookeeper/src/main/java/io/edurt/datacap/plugin/natived/zookeeper/ZookeeperParser.java +++ b/plugin/datacap-plugin-zookeeper/src/main/java/io/edurt/datacap/plugin/natived/zookeeper/ZookeeperParser.java @@ -1,7 +1,9 @@ package io.edurt.datacap.plugin.natived.zookeeper; import io.edurt.datacap.spi.parser.SqlParser; -import io.edurt.datacap.sql.SqlBase; +import io.edurt.datacap.sql.statement.SQLStatement; +import io.edurt.datacap.sql.statement.SelectStatement; +import io.edurt.datacap.sql.statement.ShowStatement; import org.apache.commons.lang3.StringUtils; public class ZookeeperParser @@ -15,16 +17,23 @@ public ZookeeperParser(String content) @Override public String getExecuteContext() { - SqlBase sqlBase = this.getSqlBase(); - if (sqlBase.getToken().equalsIgnoreCase("SHOW")) { - if (StringUtils.isEmpty(sqlBase.getTable())) { + SQLStatement statement = this.getStatement(); + + if (statement instanceof SelectStatement) { + SelectStatement selectStatement = (SelectStatement) statement; + String tableName = selectStatement.getFromSources().get(0).getTableName(); + + if (StringUtils.isEmpty(tableName)) { return ZookeeperPathConvert.start; } - return ZookeeperPathConvert.toPath(sqlBase.getTable()); + return ZookeeperPathConvert.toPath(tableName); + } + else if (statement instanceof ShowStatement) { + ShowStatement showStatement = (ShowStatement) statement; + return ZookeeperPathConvert.toPath(showStatement.getTableName()); } - else if 
(sqlBase.getToken().equalsIgnoreCase("SELECT")) { - return ZookeeperPathConvert.toPath(sqlBase.getTable()); + else { + throw new RuntimeException("Unsupported statement: " + statement); } - return null; } } diff --git a/pom.xml b/pom.xml index 7f3ae56bfb..fc71e9d284 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap pom - 2024.4.0 + 2024.4.1-SNAPSHOT client/datacap-cli @@ -23,6 +23,7 @@ lib/datacap-schedule driver/datacap-driver-redis driver/datacap-driver-mongo + driver/datacap-driver-mongodb shaded/datacap-shaded-ydb shaded/datacap-shaded-pinot shaded/datacap-shaded-neo4j @@ -109,6 +110,8 @@ test/datacap-test-executor test/datacap-test-scheduler test/datacap-test-fs + test/datacap-test-driver + test/datacap-test-parser datacap @@ -199,6 +202,7 @@ 1.20.1 2.3.2 2.1.214 + 4.11.1 3.6.0 0.10.2 @@ -409,6 +413,11 @@ commons-io ${datacap.commons-io.version} + + org.mongodb + mongodb-driver-sync + ${mongodb-driver.version} + diff --git a/scheduler/datacap-scheduler-local/pom.xml b/scheduler/datacap-scheduler-local/pom.xml index dd5979c4eb..dc321d6682 100644 --- a/scheduler/datacap-scheduler-local/pom.xml +++ b/scheduler/datacap-scheduler-local/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/scheduler/datacap-scheduler-spi/pom.xml b/scheduler/datacap-scheduler-spi/pom.xml index a29e0d9262..f1e0505808 100644 --- a/scheduler/datacap-scheduler-spi/pom.xml +++ b/scheduler/datacap-scheduler-spi/pom.xml @@ -4,7 +4,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/shaded/datacap-shaded-neo4j/pom.xml b/shaded/datacap-shaded-neo4j/pom.xml index 42fb03da02..3a4670eebf 100644 --- a/shaded/datacap-shaded-neo4j/pom.xml +++ b/shaded/datacap-shaded-neo4j/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/shaded/datacap-shaded-pinot/pom.xml b/shaded/datacap-shaded-pinot/pom.xml index 4b231ef191..673d3598e5 100644 --- a/shaded/datacap-shaded-pinot/pom.xml +++ b/shaded/datacap-shaded-pinot/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/shaded/datacap-shaded-ydb/pom.xml b/shaded/datacap-shaded-ydb/pom.xml index 6cae683705..0dfa67be97 100644 --- a/shaded/datacap-shaded-ydb/pom.xml +++ b/shaded/datacap-shaded-ydb/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/test/datacap-test-convert/pom.xml b/test/datacap-test-convert/pom.xml index 8d451c0846..ae9c96d065 100644 --- a/test/datacap-test-convert/pom.xml +++ b/test/datacap-test-convert/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/test/datacap-test-core/pom.xml b/test/datacap-test-core/pom.xml index 52a5b273df..2e82cdb5e7 100644 --- a/test/datacap-test-core/pom.xml +++ b/test/datacap-test-core/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/test/datacap-test-driver/pom.xml b/test/datacap-test-driver/pom.xml new file mode 100644 index 0000000000..143501ba26 --- /dev/null +++ b/test/datacap-test-driver/pom.xml @@ -0,0 +1,28 @@ + + + 4.0.0 + + io.edurt.datacap + datacap + 2024.4.1-SNAPSHOT + ../../pom.xml + + + datacap-test-driver + DataCap - Test - Driver + + + + org.testcontainers + testcontainers + + + io.edurt.datacap + datacap-driver-mongodb + ${project.version} + test + + + diff --git 
a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcBaseTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcBaseTest.java new file mode 100644 index 0000000000..070c0b8be5 --- /dev/null +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcBaseTest.java @@ -0,0 +1,118 @@ +package io.edurt.datacap.test.mongo; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import com.mongodb.client.MongoDatabase; +import lombok.extern.slf4j.Slf4j; +import org.bson.Document; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.utility.DockerImageName; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.Statement; +import java.time.Duration; +import java.util.List; +import java.util.Properties; + +@Slf4j +public abstract class MongoJdbcBaseTest +{ + // Static container instance shared by all test classes + @ClassRule + public static final GenericContainer MONGO_CONTAINER = new GenericContainer(DockerImageName.parse("mongo")) + .withExposedPorts(27017) + .withEnv("MONGO_INITDB_ROOT_USERNAME", "mongoadmin") + .withEnv("MONGO_INITDB_ROOT_PASSWORD", "secret") + .withCommand("mongod", "--noauth") + .waitingFor(Wait.forListeningPort() + .withStartupTimeout(Duration.ofSeconds(30))) + .withCommand("mongod", "--auth"); + protected Statement statement; + protected Connection connection; + protected MongoClient mongoClient; + + @Before + public void init() + { + try { + initializeMongoData(); + initializeJdbcConnection(); + } + catch (Exception e) { + throw new RuntimeException("Failed to initialize test environment", e); + } + } + + private void initializeMongoData() + { + String mongoUri = String.format("mongodb://mongoadmin:secret@%s:%d", + MONGO_CONTAINER.getHost(), + MONGO_CONTAINER.getFirstMappedPort()); + mongoClient = MongoClients.create(mongoUri); + + // Clean up existing data + MongoDatabase database = mongoClient.getDatabase("test"); + try { + database.getCollection("sample").drop(); + } + catch (Exception ignored) { + } + + database.createCollection("sample"); + database.getCollection("sample").insertMany( + List.of( + new Document("name", "test1").append("value", 1), + new Document("name", "test2").append("value", 2), + new Document("name", "test3").append("value", 3), + new Document("name", "test1").append("value", 4), + new Document("name", "test2").append("value", 2) + ) + ); + } + + private void initializeJdbcConnection() + throws Exception + { + Class.forName("io.edurt.datacap.driver.MongoJdbcDriver"); + Properties props = new Properties(); + props.setProperty("database", "admin"); + props.setProperty("user", "mongoadmin"); + props.setProperty("password", "secret"); + + String jdbcUrl = String.format("jdbc:mongodb://%s:%d", + MONGO_CONTAINER.getHost(), + MONGO_CONTAINER.getFirstMappedPort() + ); + connection = DriverManager.getConnection(jdbcUrl, props); + statement = connection.createStatement(); + } + + @After + public void cleanup() + { + try { + if (statement != null) { + statement.close(); + } + if (connection != null) { + connection.close(); + } + if (mongoClient != null) { + mongoClient.close(); + } + } + catch (Exception e) { + e.printStackTrace(); + } + } + + static { + MONGO_CONTAINER.setPortBindings(List.of("27017:27017")); + MONGO_CONTAINER.start(); + } +} diff --git 
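
Outside of Testcontainers, the new driver is used like any other JDBC driver; the base test above reduces to roughly the following standalone sketch. The host, port, credentials, and database values are placeholders taken from the test fixture:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;
    import java.util.Properties;

    public class MongoJdbcExample
    {
        public static void main(String[] args)
                throws Exception
        {
            // Register the driver added in this change.
            Class.forName("io.edurt.datacap.driver.MongoJdbcDriver");

            Properties props = new Properties();
            props.setProperty("database", "admin");
            props.setProperty("user", "mongoadmin");
            props.setProperty("password", "secret");

            try (Connection connection = DriverManager.getConnection("jdbc:mongodb://localhost:27017", props);
                    Statement statement = connection.createStatement();
                    ResultSet rs = statement.executeQuery("SELECT name, value FROM test.sample WHERE value = 2")) {
                while (rs.next()) {
                    System.out.println(rs.getString("name"));
                }
            }
        }
    }
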
a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverAggregationTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverAggregationTest.java new file mode 100644 index 0000000000..8a018bee0f --- /dev/null +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverAggregationTest.java @@ -0,0 +1,64 @@ +package io.edurt.datacap.test.mongo; + +import lombok.extern.slf4j.Slf4j; +import org.junit.Test; + +import java.sql.ResultSet; +import java.sql.SQLException; + +import static org.junit.Assert.assertTrue; + +@Slf4j +public class MongoJdbcDriverAggregationTest + extends MongoJdbcBaseTest +{ + @Test + public void testOrderBy() + throws SQLException + { + log.info("Test order by"); + try (ResultSet rs = statement.executeQuery("SELECT * FROM test.sample ORDER BY name DESC, value DESC")) { + assertTrue(rs.next()); + } + } + + @Test + public void testLimit() + throws SQLException + { + log.info("Test limit"); + try (ResultSet rs = statement.executeQuery("SELECT * FROM test.sample LIMIT 1")) { + assertTrue(rs.next()); + } + } + + @Test + public void testGroupBy() + throws SQLException + { + log.info("Test group by"); + try (ResultSet rs = statement.executeQuery("SELECT name FROM test.sample GROUP BY name")) { + assertTrue(rs.next()); + } + } + + @Test + public void testGroupByWithLimit() + throws SQLException + { + log.info("Test group by with limit"); + try (ResultSet rs = statement.executeQuery("SELECT name FROM test.sample GROUP BY name LIMIT 1")) { + assertTrue(rs.next()); + } + } + + @Test + public void testAggregationFunction() + throws SQLException + { + log.info("Test aggregation function"); + try (ResultSet rs = statement.executeQuery("SELECT name, SUM(value) FROM test.sample GROUP BY name")) { + assertTrue(rs.next()); + } + } +} diff --git a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverMetadataTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverMetadataTest.java new file mode 100644 index 0000000000..daf6f664d3 --- /dev/null +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverMetadataTest.java @@ -0,0 +1,28 @@ +package io.edurt.datacap.test.mongo; + +import org.junit.Test; + +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; + +public class MongoJdbcDriverMetadataTest + extends MongoJdbcBaseTest +{ + @Test + public void testSelect() + throws SQLException + { + try (ResultSet rs = statement.executeQuery("SELECT * FROM test.sample")) { + ResultSetMetaData metaData = rs.getMetaData(); + + for (int i = 1; i <= metaData.getColumnCount(); i++) { + System.out.println("Column " + i + ":"); + System.out.println(" Name: " + metaData.getColumnName(i)); + System.out.println(" Type: " + metaData.getColumnTypeName(i)); + System.out.println(" SQL Type: " + metaData.getColumnType(i)); + System.out.println(" Java Class: " + metaData.getColumnClassName(i)); + } + } + } +} diff --git a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverSelectTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverSelectTest.java new file mode 100644 index 0000000000..c2ec862dc9 --- /dev/null +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverSelectTest.java @@ -0,0 +1,44 @@ +package io.edurt.datacap.test.mongo; + +import lombok.extern.slf4j.Slf4j; +import 
org.junit.Test; + +import java.sql.ResultSet; +import java.sql.SQLException; + +import static org.junit.Assert.assertTrue; + +@Slf4j +public class MongoJdbcDriverSelectTest + extends MongoJdbcBaseTest +{ + @Test + public void testSelectAll() + throws SQLException + { + log.info("Test simple select"); + try (ResultSet rs = statement.executeQuery("SELECT * FROM test.sample")) { + assertTrue(rs.next()); + } + } + + @Test + public void testSelectSpecificColumn() + throws SQLException + { + log.info("Test specific select column"); + try (ResultSet rs = statement.executeQuery("SELECT name FROM test.sample")) { + assertTrue(rs.next()); + } + } + + @Test + public void testSelectWithAlias() + throws SQLException + { + log.info("Test alias"); + try (ResultSet rs = statement.executeQuery("SELECT name as n, value as v FROM test.sample")) { + assertTrue(rs.next()); + } + } +} diff --git a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverShowTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverShowTest.java new file mode 100644 index 0000000000..863a4bfefc --- /dev/null +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverShowTest.java @@ -0,0 +1,41 @@ +package io.edurt.datacap.test.mongo; + +import lombok.extern.slf4j.Slf4j; +import org.junit.Test; + +import java.sql.ResultSet; +import java.sql.SQLException; + +import static org.junit.Assert.assertTrue; + +@Slf4j +public class MongoJdbcDriverShowTest + extends MongoJdbcBaseTest +{ + @Test + public void testShowDatabases() + throws SQLException + { + try (ResultSet rs = statement.executeQuery("SHOW DATABASES")) { + assertTrue(rs.next()); + } + } + + @Test + public void testShowTables() + throws SQLException + { + try (ResultSet rs = statement.executeQuery("SHOW TABLES FROM test")) { + assertTrue(rs.next()); + } + } + + @Test + public void testShowColumns() + throws SQLException + { + try (ResultSet rs = statement.executeQuery("SHOW COLUMNS FROM test.sample")) { + assertTrue(rs.next()); + } + } +} diff --git a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverVersionTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverVersionTest.java new file mode 100644 index 0000000000..21694483c0 --- /dev/null +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverVersionTest.java @@ -0,0 +1,21 @@ +package io.edurt.datacap.test.mongo; + +import org.junit.Test; + +import java.sql.ResultSet; +import java.sql.SQLException; + +import static org.junit.Assert.assertTrue; + +public class MongoJdbcDriverVersionTest + extends MongoJdbcBaseTest +{ + @Test + public void test() + throws SQLException + { + try (ResultSet rs = statement.executeQuery("SELECT version()")) { + assertTrue(rs.next()); + } + } +} diff --git a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverWhereTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverWhereTest.java new file mode 100644 index 0000000000..db9cdf976f --- /dev/null +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoJdbcDriverWhereTest.java @@ -0,0 +1,44 @@ +package io.edurt.datacap.test.mongo; + +import lombok.extern.slf4j.Slf4j; +import org.junit.Test; + +import java.sql.ResultSet; +import java.sql.SQLException; + +import static org.junit.Assert.assertTrue; + +@Slf4j +public class MongoJdbcDriverWhereTest + extends MongoJdbcBaseTest 
+{ + @Test + public void testSimpleWhere() + throws SQLException + { + log.info("Test simple where clause"); + try (ResultSet rs = statement.executeQuery("SELECT * FROM test.sample WHERE value = 2")) { + assertTrue(rs.next()); + } + } + + @Test + public void testWhereWithAnd() + throws SQLException + { + log.info("Test multiple where clause by and"); + try (ResultSet rs = statement.executeQuery("SELECT * FROM test.sample WHERE name = 'test1' AND value = 1")) { + assertTrue(rs.next()); + } + } + + @Test + public void testWhereWithOr() + throws SQLException + { + log.info("Test multiple where clause by or"); + try (ResultSet rs = statement.executeQuery("SELECT * FROM test.sample WHERE name = 'test1' OR name = 'test2'")) { + assertTrue(rs.next()); + } + } +} diff --git a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoSelectParserTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoSelectParserTest.java new file mode 100644 index 0000000000..951f03be15 --- /dev/null +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoSelectParserTest.java @@ -0,0 +1,26 @@ +package io.edurt.datacap.test.mongo; + +import io.edurt.datacap.driver.parser.MongoParser; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +public class MongoSelectParserTest +{ + private final static String sql = "SELECT name, age FROM users WHERE age > 18 AND city = 'Beijing' ORDER BY age DESC LIMIT 10"; + private MongoParser parser; + + @Before + public void before() + { + this.parser = MongoParser.createParser(sql); + } + + @Test + public void testSelect() + { + assertEquals("users", parser.getCollection()); + assertEquals("{\"aggregate\": \"users\", \"pipeline\": [{\"$match\": {\"$and\": [{\"age\": {\"$gt\": 18}}, {\"city\": \"Beijing\"}]}}, {\"$project\": {\"_id\": 0, \"name\": 1, \"age\": 1}}, {\"$sort\": {\"age\": -1}}, {\"$limit\": 10}], \"cursor\": {}}", parser.getQuery().toJson()); + } +} diff --git a/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoShowParserTest.java b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoShowParserTest.java new file mode 100644 index 0000000000..cc912af0f8 --- /dev/null +++ b/test/datacap-test-driver/src/test/java/io/edurt/datacap/test/mongo/MongoShowParserTest.java @@ -0,0 +1,92 @@ +package io.edurt.datacap.test.mongo; + +import io.edurt.datacap.driver.parser.MongoParser; +import io.edurt.datacap.sql.SQLParseException; +import org.bson.Document; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +public class MongoShowParserTest +{ + @Test + public void testShowDatabases() + { + // Basic SHOW DATABASES + MongoParser parser = MongoParser.createParser("SHOW DATABASES"); + assertEquals("listDatabases", parser.getCommand()); + assertNull(parser.getFilter()); + + // SHOW DATABASES with LIKE pattern + parser = MongoParser.createParser("SHOW DATABASES LIKE '%test%'"); + assertEquals("listDatabases", parser.getCommand()); + assertNotNull(parser.getFilter()); + Document filter = parser.getFilter(); + assertTrue(filter.containsKey("name")); + Document regex = (Document) filter.get("name"); + assertEquals(".*test.*", regex.get("$regex")); + } + + @Test + public void testShowTables() + { + // Basic SHOW TABLES + MongoParser parser = MongoParser.createParser("SHOW TABLES"); + 
assertEquals("listCollections", parser.getCommand()); + assertNull(parser.getDatabase()); + assertNull(parser.getFilter()); + + // SHOW TABLES FROM database + parser = MongoParser.createParser("SHOW TABLES FROM mydb"); + assertEquals("listCollections", parser.getCommand()); + assertEquals("mydb", parser.getDatabase()); + assertNull(parser.getFilter()); + + // SHOW TABLES with LIKE pattern + parser = MongoParser.createParser("SHOW TABLES FROM mydb LIKE '%user%'"); + assertEquals("listCollections", parser.getCommand()); + assertEquals("mydb", parser.getDatabase()); + assertNotNull(parser.getFilter()); + Document filter = parser.getFilter(); + assertTrue(filter.containsKey("name")); + Document regex = (Document) filter.get("name"); + assertEquals(".*user.*", regex.get("$regex")); + } + + @Test + public void testShowColumns() + { + // Basic SHOW COLUMNS + MongoParser parser = MongoParser.createParser("SHOW COLUMNS FROM users"); + assertEquals("listFields", parser.getCommand()); + assertEquals("users", parser.getCollection()); + assertNull(parser.getDatabase()); + assertNull(parser.getFilter()); + + // SHOW COLUMNS with database + parser = MongoParser.createParser("SHOW COLUMNS FROM users FROM mydb"); + assertEquals("listFields", parser.getCommand()); + assertEquals("users", parser.getCollection()); + assertEquals("mydb", parser.getDatabase()); + assertNull(parser.getFilter()); + + // SHOW COLUMNS with LIKE pattern + parser = MongoParser.createParser("SHOW COLUMNS FROM users LIKE '%id%'"); + assertEquals("listFields", parser.getCommand()); + assertEquals("users", parser.getCollection()); + assertNotNull(parser.getFilter()); + Document filter = parser.getFilter(); + assertTrue(filter.containsKey("name")); + Document regex = (Document) filter.get("name"); + assertEquals(".*id.*", regex.get("$regex")); + } + + @Test(expected = SQLParseException.class) + public void testInvalidStatement() + { + MongoParser.createParser("SHO"); + } +} diff --git a/test/datacap-test-driver/src/test/resources/logback.xml b/test/datacap-test-driver/src/test/resources/logback.xml new file mode 100644 index 0000000000..5d804bfa26 --- /dev/null +++ b/test/datacap-test-driver/src/test/resources/logback.xml @@ -0,0 +1,12 @@ + + + + + %date %level [%thread] %logger [%file:%line] %msg%n + + + + + + + diff --git a/test/datacap-test-executor/pom.xml b/test/datacap-test-executor/pom.xml index 80c1586d7c..54bafd4131 100644 --- a/test/datacap-test-executor/pom.xml +++ b/test/datacap-test-executor/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/test/datacap-test-fs/pom.xml b/test/datacap-test-fs/pom.xml index 43e5c643bd..d7862dd0f5 100644 --- a/test/datacap-test-fs/pom.xml +++ b/test/datacap-test-fs/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/test/datacap-test-lib/pom.xml b/test/datacap-test-lib/pom.xml index 104679b1dc..a443fb9171 100644 --- a/test/datacap-test-lib/pom.xml +++ b/test/datacap-test-lib/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/test/datacap-test-parser/pom.xml b/test/datacap-test-parser/pom.xml new file mode 100644 index 0000000000..cb6aa842ac --- /dev/null +++ b/test/datacap-test-parser/pom.xml @@ -0,0 +1,23 @@ + + + 4.0.0 + + io.edurt.datacap + datacap + 2024.4.1-SNAPSHOT + ../../pom.xml + + + datacap-test-parser + DataCap - Test - Parser + + + + io.edurt.datacap + datacap-parser + test + + + diff --git 
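
Taken together, the parser tests above document the public MongoParser surface. A minimal standalone usage, mirroring the SELECT and SHOW assertions in this change (output comments paraphrase the expected values from the tests):

    import io.edurt.datacap.driver.parser.MongoParser;

    public class MongoParserSketch
    {
        public static void main(String[] args)
        {
            // A SELECT is compiled into a MongoDB aggregate command document.
            MongoParser parser = MongoParser.createParser(
                    "SELECT name, age FROM users WHERE age > 18 AND city = 'Beijing' ORDER BY age DESC LIMIT 10");
            System.out.println(parser.getCollection()); // users
            System.out.println(parser.getQuery().toJson()); // {"aggregate": "users", "pipeline": [...], "cursor": {}}

            // SHOW statements are compiled into database commands instead.
            MongoParser show = MongoParser.createParser("SHOW TABLES FROM mydb LIKE '%user%'");
            System.out.println(show.getCommand()); // listCollections
            System.out.println(show.getFilter().toJson()); // name filter with $regex .*user.*
        }
    }
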
a/test/datacap-test-parser/src/test/java/io/edurt/datacap/test/basic/SelectStatementTest.java b/test/datacap-test-parser/src/test/java/io/edurt/datacap/test/basic/SelectStatementTest.java new file mode 100644 index 0000000000..f854e7f229 --- /dev/null +++ b/test/datacap-test-parser/src/test/java/io/edurt/datacap/test/basic/SelectStatementTest.java @@ -0,0 +1,155 @@ +package io.edurt.datacap.test.basic; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.edurt.datacap.sql.SQLParser; +import io.edurt.datacap.sql.node.Expression; +import io.edurt.datacap.sql.node.clause.JoinClause; +import io.edurt.datacap.sql.node.element.OrderByElement; +import io.edurt.datacap.sql.node.element.SelectElement; +import io.edurt.datacap.sql.node.element.TableElement; +import io.edurt.datacap.sql.statement.SQLStatement; +import io.edurt.datacap.sql.statement.SelectStatement; +import org.junit.Test; + +import java.util.List; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +@SuppressFBWarnings(value = {"JUA_DONT_ASSERT_INSTANCEOF_IN_TESTS"}) +public class SelectStatementTest +{ + @Test + public void testSimpleSelect() + { + String sql = "SELECT id, name FROM users"; + SQLStatement stmt = SQLParser.parse(sql); + + assertTrue(stmt instanceof SelectStatement); + SelectStatement select = (SelectStatement) stmt; + + List selectElements = select.getSelectElements(); + assertEquals(2, selectElements.size()); + assertEquals("id", selectElements.get(0).getColumn()); + assertEquals("name", selectElements.get(1).getColumn()); + + List fromSources = select.getFromSources(); + assertEquals(1, fromSources.size()); + assertEquals("users", fromSources.get(0).getTableName()); + } + + @Test + public void testSelectWithWhereAndOrderBy() + { + String sql = "SELECT id, name FROM users WHERE age > 18 ORDER BY name DESC"; + SelectStatement select = (SelectStatement) SQLParser.parse(sql); + + Expression where = select.getWhereClause(); + assertNotNull(where); + assertEquals(Expression.ExpressionType.BINARY_OP, where.getType()); + + List orderBy = select.getOrderByElements(); + assertEquals(1, orderBy.size()); + assertFalse(orderBy.get(0).isAscending()); + } + + @Test + public void testSelectWithJoin() + { + String sql = "SELECT u.id, u.name, o.order_id " + + "FROM users u " + + "LEFT JOIN orders o ON u.id = o.user_id"; + SelectStatement select = (SelectStatement) SQLParser.parse(sql); + + List fromSources = select.getFromSources(); + assertEquals(1, fromSources.size()); + TableElement mainTable = fromSources.get(0); + assertEquals("users", mainTable.getTableName()); + assertEquals("u", mainTable.getAlias()); + + List joins = mainTable.getJoins(); + assertEquals(1, joins.size()); + JoinClause join = joins.get(0); + assertEquals(JoinClause.JoinType.LEFT, join.getJoinType()); + assertEquals("orders", join.getRightTable().getTableName()); + assertEquals("o", join.getRightTable().getAlias()); + } + + @Test + public void testComplexSelect() + { + String sql = "SELECT " + + " u.id, " + + " u.name, " + + " COUNT(o.order_id) as order_count " + + "FROM users u " + + "LEFT JOIN orders o ON u.id = o.user_id " + + "WHERE u.status = 'active' " + + "GROUP BY u.id, u.name " + + "HAVING COUNT(o.order_id) > 5 " + + "ORDER BY order_count DESC " + + "LIMIT 10"; + + SelectStatement select = (SelectStatement) SQLParser.parse(sql); + + List selectElements = select.getSelectElements(); + assertEquals(3, 
selectElements.size()); + assertEquals("order_count", selectElements.get(2).getAlias()); + + List groupBy = select.getGroupByElements(); + assertEquals(2, groupBy.size()); + + Expression having = select.getHavingClause(); + assertNotNull(having); + + assertNotNull(select.getLimitClause()); + assertEquals(10, select.getLimitClause().getLimit()); + } + + @Test + public void testSelectWithSubquery() + { + String sql = "SELECT * FROM (SELECT id, name FROM users) as u"; + SelectStatement select = (SelectStatement) SQLParser.parse(sql); + + List fromSources = select.getFromSources(); + assertNotNull("FromSources should not be null", fromSources); + assertEquals("Should have one source", 1, fromSources.size()); + + TableElement table = fromSources.get(0); + assertNotNull("Table should not be null", table); + assertEquals("Alias should be 'u'", "u", table.getAlias()); + + SelectStatement subquery = table.getSubquery(); + assertNotNull("Subquery should not be null", subquery); + + List subqueryElements = subquery.getSelectElements(); + assertEquals("Subquery should have 2 columns", 2, subqueryElements.size()); + assertEquals("First column should be 'id'", "id", subqueryElements.get(0).getColumn()); + assertEquals("Second column should be 'name'", "name", subqueryElements.get(1).getColumn()); + } + + @Test + public void testSelectWithCaseWhen() + { + String sql = "SELECT id, " + + "CASE WHEN age < 18 THEN 'minor' " + + " WHEN age < 60 THEN 'adult' " + + " ELSE 'senior' END as age_group " + + "FROM users"; + + SelectStatement select = (SelectStatement) SQLParser.parse(sql); + List selectElements = select.getSelectElements(); + assertEquals(2, selectElements.size()); + assertEquals("age_group", selectElements.get(1).getAlias()); + } + + @Test(expected = RuntimeException.class) + public void testInvalidSQL() + { + String sql = "SELECT * FORM users"; + SQLParser.parse(sql); + } +} diff --git a/test/datacap-test-parser/src/test/java/io/edurt/datacap/test/basic/ShowStatementTest.java b/test/datacap-test-parser/src/test/java/io/edurt/datacap/test/basic/ShowStatementTest.java new file mode 100644 index 0000000000..ff4391b615 --- /dev/null +++ b/test/datacap-test-parser/src/test/java/io/edurt/datacap/test/basic/ShowStatementTest.java @@ -0,0 +1,177 @@ +package io.edurt.datacap.test.basic; + +import io.edurt.datacap.sql.SQLParser; +import io.edurt.datacap.sql.statement.SQLStatement; +import io.edurt.datacap.sql.statement.ShowStatement; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +public class ShowStatementTest +{ + private ShowStatement parse(String sql) + { + SQLStatement stmt = SQLParser.parse(sql); + + assertTrue(stmt instanceof ShowStatement); + return (ShowStatement) stmt; + } + + @Test + public void testShowDatabases() + { + // Basic SHOW DATABASES + ShowStatement stmt = parse("SHOW DATABASES"); + assertEquals(ShowStatement.ShowType.DATABASES, stmt.getShowType()); + assertNull(stmt.getPattern()); + assertNull(stmt.getWhereCondition()); + + // SHOW DATABASES with LIKE pattern + stmt = parse("SHOW DATABASES LIKE '%test%'"); + assertEquals(ShowStatement.ShowType.DATABASES, stmt.getShowType()); + assertEquals("%test%", stmt.getPattern()); + assertNull(stmt.getWhereCondition()); + + // Case insensitivity test + stmt = parse("show DATABASES like '%TEST%'"); + assertEquals(ShowStatement.ShowType.DATABASES, stmt.getShowType()); + 
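// keywords are parsed case-insensitively, while the pattern literal keeps its original case +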
assertEquals("%TEST%", stmt.getPattern()); + } + + @Test + public void testShowTables() + { + // Basic SHOW TABLES + ShowStatement stmt = parse("SHOW TABLES"); + assertEquals(ShowStatement.ShowType.TABLES, stmt.getShowType()); + assertNull(stmt.getDatabaseName()); + assertNull(stmt.getPattern()); + assertNull(stmt.getWhereCondition()); + + // SHOW TABLES with database + stmt = parse("SHOW TABLES FROM mydb"); + assertEquals(ShowStatement.ShowType.TABLES, stmt.getShowType()); + assertEquals("mydb", stmt.getDatabaseName()); + + // SHOW TABLES with IN keyword + stmt = parse("SHOW TABLES IN mydb"); + assertEquals(ShowStatement.ShowType.TABLES, stmt.getShowType()); + assertEquals("mydb", stmt.getDatabaseName()); + + // SHOW TABLES with LIKE pattern + stmt = parse("SHOW TABLES FROM mydb LIKE '%user%'"); + assertEquals(ShowStatement.ShowType.TABLES, stmt.getShowType()); + assertEquals("mydb", stmt.getDatabaseName()); + assertEquals("%user%", stmt.getPattern()); + + // SHOW TABLES with WHERE clause + stmt = parse("SHOW TABLES FROM mydb WHERE Tables_in_mydb LIKE '%user%'"); + assertEquals(ShowStatement.ShowType.TABLES, stmt.getShowType()); + assertEquals("mydb", stmt.getDatabaseName()); + assertNotNull(stmt.getWhereCondition()); + } + + @Test + public void testShowColumns() + { + // Basic SHOW COLUMNS + ShowStatement stmt = parse("SHOW COLUMNS FROM users"); + assertEquals(ShowStatement.ShowType.COLUMNS, stmt.getShowType()); + assertEquals("users", stmt.getTableName()); + assertNull(stmt.getDatabaseName()); + assertNull(stmt.getPattern()); + + // SHOW COLUMNS with database + stmt = parse("SHOW COLUMNS FROM users FROM mydb"); + assertEquals(ShowStatement.ShowType.COLUMNS, stmt.getShowType()); + assertEquals("users", stmt.getTableName()); + assertEquals("mydb", stmt.getDatabaseName()); + + // Alternative syntax with IN + stmt = parse("SHOW COLUMNS FROM users IN mydb"); + assertEquals(ShowStatement.ShowType.COLUMNS, stmt.getShowType()); + assertEquals("users", stmt.getTableName()); + assertEquals("mydb", stmt.getDatabaseName()); + + // SHOW COLUMNS with LIKE pattern + stmt = parse("SHOW COLUMNS FROM users LIKE '%id%'"); + assertEquals(ShowStatement.ShowType.COLUMNS, stmt.getShowType()); + assertEquals("users", stmt.getTableName()); + assertEquals("%id%", stmt.getPattern()); + + // SHOW COLUMNS with WHERE clause + stmt = parse("SHOW COLUMNS FROM users WHERE Field LIKE '%id%'"); + assertEquals(ShowStatement.ShowType.COLUMNS, stmt.getShowType()); + assertEquals("users", stmt.getTableName()); + assertNotNull(stmt.getWhereCondition()); + + // Full syntax test + stmt = parse("SHOW COLUMNS FROM users IN mydb WHERE Field LIKE '%id%'"); + assertEquals(ShowStatement.ShowType.COLUMNS, stmt.getShowType()); + assertEquals("users", stmt.getTableName()); + assertEquals("mydb", stmt.getDatabaseName()); + assertNotNull(stmt.getWhereCondition()); + } + + @Test + public void testEdgeCases() + { + // Mixed case + ShowStatement stmt = parse("ShOw DaTaBaSeS LiKe '%test%'"); + assertEquals(ShowStatement.ShowType.DATABASES, stmt.getShowType()); + assertEquals("%test%", stmt.getPattern()); + + // Extra whitespace + stmt = parse("SHOW TABLES FROM mydb"); + assertEquals(ShowStatement.ShowType.TABLES, stmt.getShowType()); + assertEquals("mydb", stmt.getDatabaseName()); + + // Complex WHERE conditions + stmt = parse("SHOW COLUMNS FROM users WHERE Field LIKE '%id%' AND Type = 'int'"); + assertEquals(ShowStatement.ShowType.COLUMNS, stmt.getShowType()); + assertEquals("users", stmt.getTableName()); + 
assertNotNull(stmt.getWhereCondition()); + } + + @Test(expected = RuntimeException.class) + public void testInvalidSyntax() + { + // This should throw an exception + parse("SHOW INVALID"); + } + + @Test + public void testQuotedIdentifiers() + { + // Test with quoted database name + ShowStatement stmt = parse("SHOW TABLES FROM `my-db`"); + assertEquals(ShowStatement.ShowType.TABLES, stmt.getShowType()); + assertEquals("`my-db`", stmt.getDatabaseName()); + + // Test with quoted table name + stmt = parse("SHOW COLUMNS FROM `user-table`"); + assertEquals(ShowStatement.ShowType.COLUMNS, stmt.getShowType()); + assertEquals("`user-table`", stmt.getTableName()); + + // Test with both quoted + stmt = parse("SHOW COLUMNS FROM `user-table` IN `my-db`"); + assertEquals(ShowStatement.ShowType.COLUMNS, stmt.getShowType()); + assertEquals("`user-table`", stmt.getTableName()); + assertEquals("`my-db`", stmt.getDatabaseName()); + } + + @Test + public void testPatternQuotes() + { + // Test with single quotes + ShowStatement stmt = parse("SHOW DATABASES LIKE '%test%'"); + assertEquals("%test%", stmt.getPattern()); + + // Test with double quotes + stmt = parse("SHOW DATABASES LIKE \"%test%\""); + assertEquals("%test%", stmt.getPattern()); + } +} diff --git a/test/datacap-test-plugin/pom.xml b/test/datacap-test-plugin/pom.xml index 7777b48de1..b8e9b63d0a 100644 --- a/test/datacap-test-plugin/pom.xml +++ b/test/datacap-test-plugin/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml diff --git a/test/datacap-test-scheduler/pom.xml b/test/datacap-test-scheduler/pom.xml index b921adbc9a..89a87bbfe4 100644 --- a/test/datacap-test-scheduler/pom.xml +++ b/test/datacap-test-scheduler/pom.xml @@ -6,7 +6,7 @@ io.edurt.datacap datacap - 2024.4.0 + 2024.4.1-SNAPSHOT ../../pom.xml