diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..097f9f9 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,9 @@ +# +# https://help.github.com/articles/dealing-with-line-endings/ +# +# Linux start script should use lf +/gradlew text eol=lf + +# These are Windows script files and should use crlf +*.bat text eol=crlf + diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..c5ebbcf --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,7 @@ +# Lines starting with '#' are comments. +# Each line is a file pattern followed by one or more owners. + +# See: https://help.github.com/articles/about-codeowners/ + +# These owners will be the default owners for everything in the repo. +* @hasithaa @prakanth97 diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..96b073c --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,12 @@ +## Purpose + +Fixes: + +## Examples + +## Checklist +- [ ] Linked to an issue +- [ ] Updated the changelog +- [ ] Added tests +- [ ] Updated the spec +- [ ] Checked native-image compatibility diff --git a/.github/workflows/build-timestamped-master.yml b/.github/workflows/build-timestamped-master.yml new file mode 100644 index 0000000..dc0b8e4 --- /dev/null +++ b/.github/workflows/build-timestamped-master.yml @@ -0,0 +1,18 @@ +name: Build + +on: + push: + branches: + - main + paths-ignore: + - '*.md' + - 'docs/**' + - 'load-tests/**' + workflow_dispatch: + +jobs: + call_workflow: + name: Run Build Workflow + if: ${{ github.repository_owner == 'ballerina-platform' }} + uses: ballerina-platform/ballerina-standard-library/.github/workflows/build-timestamp-master-template.yml@main + secrets: inherit diff --git a/.github/workflows/build-with-bal-test-graalvm.yml b/.github/workflows/build-with-bal-test-graalvm.yml new file mode 100644 index 0000000..fc149e3 --- /dev/null +++ b/.github/workflows/build-with-bal-test-graalvm.yml @@ -0,0 +1,37 @@ +name: GraalVM Check + +on: + workflow_dispatch: + inputs: + lang_tag: + description: Branch/Release Tag of the Ballerina Lang + required: true + default: master + lang_version: + description: Ballerina Lang Version (If given ballerina lang build will be skipped) + required: false + default: '' + native_image_options: + description: Default native-image options + required: false + default: '' + schedule: + - cron: '30 18 * * *' + pull_request: + branches: + - main + types: [ opened, synchronize, reopened, labeled, unlabeled ] + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} + cancel-in-progress: true + +jobs: + call_stdlib_workflow: + name: Run StdLib Workflow + if: ${{ github.event_name != 'schedule' || (github.event_name == 'schedule' && github.repository_owner == 'ballerina-platform') }} + uses: ballerina-platform/ballerina-standard-library/.github/workflows/build-with-bal-test-graalvm-template.yml@main + with: + lang_tag: ${{ inputs.lang_tag }} + lang_version: ${{ inputs.lang_version }} + native_image_options: '-J-Xmx7G ${{ inputs.native_image_options }}' diff --git a/.github/workflows/central-publish.yml b/.github/workflows/central-publish.yml new file mode 100644 index 0000000..c0bd478 --- /dev/null +++ b/.github/workflows/central-publish.yml @@ -0,0 +1,21 @@ +name: Publish to the Ballerina central + +on: + workflow_dispatch: + inputs: + environment: + type: choice + description: Select Environment + required: true + options: + - DEV CENTRAL + - STAGE CENTRAL 
+ +jobs: + call_workflow: + name: Run Central Publish Workflow + if: ${{ github.repository_owner == 'ballerina-platform' }} + uses: ballerina-platform/ballerina-standard-library/.github/workflows/central-publish-template.yml@main + secrets: inherit + with: + environment: ${{ github.event.inputs.environment }} diff --git a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml new file mode 100644 index 0000000..d5031f5 --- /dev/null +++ b/.github/workflows/publish-release.yml @@ -0,0 +1,16 @@ +name: Publish Release + +on: + workflow_dispatch: + repository_dispatch: + types: [stdlib-release-pipeline] + +jobs: + call_workflow: + name: Run Release Workflow + if: ${{ github.repository_owner == 'ballerina-platform' }} + uses: ballerina-platform/ballerina-standard-library/.github/workflows/release-package-template.yml@main + secrets: inherit + with: + package-name: data.jsondata + package-org: ballerina diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml new file mode 100644 index 0000000..48bfdc3 --- /dev/null +++ b/.github/workflows/pull-request.yml @@ -0,0 +1,14 @@ +name: Pull Request + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} + cancel-in-progress: true + +on: pull_request + +jobs: + call_workflow: + name: Run PR Build Workflow + if: ${{ github.repository_owner == 'ballerina-platform' }} + uses: ballerina-platform/ballerina-standard-library/.github/workflows/pull-request-build-template.yml@main + secrets: inherit diff --git a/.github/workflows/stale-check.yml b/.github/workflows/stale-check.yml new file mode 100644 index 0000000..8763360 --- /dev/null +++ b/.github/workflows/stale-check.yml @@ -0,0 +1,19 @@ +name: 'Close stale pull requests' + +on: + schedule: + - cron: '30 19 * * *' + workflow_dispatch: + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v3 + with: + stale-pr-message: 'This PR has been open for more than 15 days with no activity. This will be closed in 3 days unless the `stale` label is removed or commented.' + close-pr-message: 'Closed PR due to inactivity for more than 18 days.' 
+ days-before-pr-stale: 15 + days-before-pr-close: 3 + days-before-issue-stale: -1 + days-before-issue-close: -1 diff --git a/.github/workflows/trivy-scan.yml b/.github/workflows/trivy-scan.yml new file mode 100644 index 0000000..b9b5a62 --- /dev/null +++ b/.github/workflows/trivy-scan.yml @@ -0,0 +1,13 @@ +name: Trivy + +on: + workflow_dispatch: + schedule: + - cron: '30 20 * * *' + +jobs: + call_workflow: + name: Run Trivy Scan Workflow + if: ${{ github.repository_owner == 'ballerina-platform' }} + uses: ballerina-platform/ballerina-standard-library/.github/workflows/trivy-scan-template.yml@main + secrets: inherit diff --git a/.gitignore b/.gitignore index 524f096..8262b3f 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ # Package Files # *.jar +!gradle/wrapper/gradle-wrapper.jar *.war *.nar *.ear @@ -22,3 +23,26 @@ # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml hs_err_pid* replay_pid* + +# Ignore Gradle project-specific cache directory +.gradle + +# Ignore Gradle build output directory +build + +.gradle/ +target +bin/ + +# IDEA Files +.idea/ +*.iml +*.ipr +*.iws + +# MacOS +*.DS_Store + +# Ballerina +velocity.log* +*Ballerina.lock diff --git a/README.md b/README.md index f2bc4a0..54f2ee0 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,267 @@ -# module-ballerina-data.jsondata +# Ballerina JSON Data Library + The Ballerina JSON Data Library is a comprehensive toolkit designed to facilitate the handling and manipulation of JSON data within Ballerina applications. It streamlines the process of converting JSON data to native Ballerina data types, enabling developers to work with JSON content seamlessly and efficiently. + +## Features + +- **Versatile JSON Data Input**: Accept JSON data as a ballerina JSON value, a string, byte array, or a stream and convert it into a subtype of anydata. +- **JSON to anydata Value Conversion**: Transform JSON data into expected type which is subtype of anydata. +- **Projection Support**: Perform selective conversion of JSON data subsets into anydata values through projection. + +## Usage + +### Converting JSON Document value to a record value + +To convert an JSON document value to a record value, you can utilize the `parseAsType` function provided by the library. The example below showcases the transformation of an JSON document value into a record value. + +```ballerina +import ballerina/data.jsondata; +import ballerina/io; + +type Book record { + string name; + string author; + int year; +}; + +public function main() returns error? { + json jsonContent = { + "name": "Clean Code", + "author": "Robert C. Martin", + "year": 2008 + }; + + Book book = check jsondata:parseAsType(jsonContent); + io:println(book); +} +``` + +### Converting external JSON document to a record value + +For transforming JSON content from an external source into a record value, the `parseString`, `parseBytes`, `parseStream` functions can be used. This external source can be in the form of a string or a byte array/byte-block-stream that houses the JSON data. This is commonly extracted from files or network sockets. The example below demonstrates the conversion of an JSON value from an external source into a record value. + +```ballerina +import ballerina/data.jsondata; +import ballerina/io; + +type Book record { + string name; + string author; + int year; +}; + +public function main() returns error? 
{ + string jsonContent = check io:fileReadString("path/to/file.json"); + Book book = check jsondata:parseString(jsonContent); + io:println(book); +} +``` + +Make sure to handle possible errors that may arise during the file reading or JSON to anydata conversion process. The `check` keyword is utilized to handle these errors, but more sophisticated error handling can be implemented as per your requirements. + +## JSON to anydata representation + +The conversion of JSON data to subtype of anydata representation is a fundamental feature of the library. + +### JSON Object + +The JSON Object can be represented as a value of type record/map in Ballerina which facilitates a structured and type-safe approach to handling JSON data. + +Take for instance the following JSON Object snippet: + +```json +{ + "author": "Robert C. Martin", + "books": [ + { + "name": "Clean Code", + "year": 2008 + }, + { + "name": "Clean Architecture", + "year": 2017 + } + ] +} +``` + +This JSON Object can be represented as a record value in Ballerina as follows: + +```ballerina +type Author record { + string author; + Book[] books; +}; + +type Book record { + string name; + int year; +}; + +public function main() returns error? { + json jsonContent = { + "author": "Robert C. Martin", + "books": [ + { + "name": "Clean Code", + "year": 2008 + }, + { + "name": "Clean Architecture", + "year": 2017 + } + ] + }; + + Author author = check jsondata:parseAsType(jsonContent); + io:println(author); +} +``` + +### JSON Array + +The JSON Array can be represented as an array/tuple values in Ballerina. + +```json +[ + { + "name": "Clean Code", + "year": 2008 + }, + { + "name": "Clean Architecture", + "year": 2017 + } +] +``` + +This JSON Array can be converted as an array/tuple in Ballerina as follows: + +```ballerina +type Book record { + string name; + int year; +}; + +public function main() returns error? { + json jsonContent = [ + { + "name": "Clean Code", + "year": 2008 + }, + { + "name": "Clean Architecture", + "year": 2017 + } + ]; + + Book[] bookArr = check jsondata:parseAsType(jsonContent); + io:println(bookArr); + + [Book, Book] bookTuple = check jsondata:parseAsType(jsonContent); + io:println(bookTuple); +} +``` + +### Controlling the JSON to record conversion + +The library allows for selective conversion of JSON into closed records. This is beneficial when the JSON data contains members that are not necessary to be transformed into record fields. + +```json +{ + "name": "Clean Code", + "author": "Robert C. Martin", + "year": 2008, + "publisher": "Prentice Hall" +} +``` + +The JSON data above contains `publisher` and `year` fields which are not required to be converted into a record field. + +```ballerina +type Book record {| + string name; + string author; +|}; + +public function main() returns error? { + json jsonContent = { + "name": "Clean Code", + "author": "Robert C. Martin", + "year": 2008, + "publisher": "Prentice Hall" + }; + + Book book = check jsondata:parseAsType(jsonContent); + io:println(book); +} +``` + +However, if the rest field is utilized (or if the record type is defined as an open record), all members in the JSON data will be transformed into record fields: + +```ballerina +type Book record { + string name; + string author; +} +``` + +In this instance, all other members in the JSON data, such as `year` and `publisher` will be transformed into `anydata-typed` fields with the corresponding JSON object member as the key-value pair. + +This behavior extends to arrays as well. 
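+For example, here is a minimal sketch of both behaviours (the `OpenBook` and `ClosedBook` type names are illustrative): an open record retains the extra members as rest fields, while projecting a JSON array onto an array of closed records drops them element-wise.
+
+```ballerina
+import ballerina/data.jsondata;
+import ballerina/io;
+
+type OpenBook record {
+    string name;
+    string author;
+};
+
+type ClosedBook record {|
+    string name;
+|};
+
+public function main() returns error? {
+    json jsonContent = {
+        "name": "Clean Code",
+        "author": "Robert C. Martin",
+        "year": 2008,
+        "publisher": "Prentice Hall"
+    };
+
+    // `year` and `publisher` are retained as rest fields of the open record.
+    OpenBook openBook = check jsondata:parseAsType(jsonContent);
+    io:println(openBook);
+
+    json bookArray = [
+        {"name": "Clean Code", "year": 2008},
+        {"name": "Clean Architecture", "year": 2017}
+    ];
+
+    // Each array element is projected onto the closed record, so `year` is dropped.
+    ClosedBook[] books = check jsondata:parseAsType(bookArray);
+    io:println(books);
+}
+```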
+ +The process of projecting JSON data into a record supports various use cases, including the filtering out of unnecessary members. This functionality is anticipated to be enhanced in the future to accommodate more complex scenarios, such as filtering values based on regular expressions, among others. + +## Issues and projects + +Issues and Projects tabs are disabled for this repository as this is part of the Ballerina library. To report bugs, request new features, start new discussions, view project boards, etc. please visit Ballerina library [parent repository](https://github.com/ballerina-platform/ballerina-library). + +This repository only contains the source code for the package. + +## Building from the source + +### Set up the prerequisites + +1. Download and install Java SE Development Kit (JDK) version 17 (from one of the following locations). + * [Oracle](https://www.oracle.com/java/technologies/downloads/) + * [OpenJDK](https://adoptium.net/) + +2. Export your GitHub personal access token with the read package permissions as follows. + + export packageUser= + export packagePAT= + +### Building the source + +Execute the commands below to build from source. + +1. To build the library: + + ./gradlew clean build + +2. Publish ZIP artifact to the local `.m2` repository: + + ./gradlew clean build publishToMavenLocal + +3. Publish the generated artifacts to the local Ballerina central repository: + + ./gradlew clean build -PpublishToLocalCentral=true + +4. Publish the generated artifacts to the Ballerina central repository: + + ./gradlew clean build -PpublishToCentral=true + +## Contributing to Ballerina + +As an open source project, Ballerina welcomes contributions from the community. + +For more information, go to the [contribution guidelines](https://github.com/ballerina-platform/ballerina-lang/blob/master/CONTRIBUTING.md). + +## Code of conduct + +All contributors are encouraged to read the [Ballerina code of conduct](https://ballerina.io/code-of-conduct). + +## Useful links + +* Chat live with us via our [Discord server](https://discord.gg/ballerinalang). +* Post all technical questions on Stack Overflow with the [#ballerina](https://stackoverflow.com/questions/tagged/ballerina) tag. diff --git a/ballerina/Ballerina.toml b/ballerina/Ballerina.toml new file mode 100644 index 0000000..78438f3 --- /dev/null +++ b/ballerina/Ballerina.toml @@ -0,0 +1,18 @@ +[package] +org = "ballerina" +name = "data.jsondata" +version = "0.1.0" +authors = ["Ballerina"] +keywords = ["json"] +repository = "https://github.com/ballerina-platform/module-ballerina.jsondata" +license = ["Apache-2.0"] +distribution = "2201.8.4" + +[platform.java17] +graalvmCompatible = true + +[[platform.java17.dependency]] +groupId = "io.ballerina.lib" +artifactId = "jsondata-native" +version = "0.1.0" +path = "../native/build/libs/data.jsondata-native-0.1.0-SNAPSHOT.jar" diff --git a/ballerina/CompilerPlugin.toml b/ballerina/CompilerPlugin.toml new file mode 100644 index 0000000..847ec27 --- /dev/null +++ b/ballerina/CompilerPlugin.toml @@ -0,0 +1,6 @@ +[plugin] +id = "constraint-compiler-plugin" +class = "io.ballerina.lib.data.jsondata.compiler.JsondataCompilerPlugin" + +[[dependency]] +path = "../compiler-plugin/build/libs/data.jsondata-compiler-plugin-0.1.0-SNAPSHOT.jar" diff --git a/ballerina/Dependencies.toml b/ballerina/Dependencies.toml new file mode 100644 index 0000000..636b88c --- /dev/null +++ b/ballerina/Dependencies.toml @@ -0,0 +1,74 @@ +# AUTO-GENERATED FILE. DO NOT MODIFY. 
+ +# This file is auto-generated by Ballerina for managing dependency versions. +# It should not be modified by hand. + +[ballerina] +dependencies-toml-version = "2" +distribution-version = "2201.8.4" + +[[package]] +org = "ballerina" +name = "data.jsondata" +version = "0.1.0" +dependencies = [ + {org = "ballerina", name = "io"}, + {org = "ballerina", name = "jballerina.java"}, + {org = "ballerina", name = "test"} +] +modules = [ + {org = "ballerina", packageName = "data.jsondata", moduleName = "data.jsondata"} +] + +[[package]] +org = "ballerina" +name = "io" +version = "1.6.0" +scope = "testOnly" +dependencies = [ + {org = "ballerina", name = "jballerina.java"}, + {org = "ballerina", name = "lang.value"} +] +modules = [ + {org = "ballerina", packageName = "io", moduleName = "io"} +] + +[[package]] +org = "ballerina" +name = "jballerina.java" +version = "0.0.0" +modules = [ + {org = "ballerina", packageName = "jballerina.java", moduleName = "jballerina.java"} +] + +[[package]] +org = "ballerina" +name = "lang.error" +version = "0.0.0" +scope = "testOnly" +dependencies = [ + {org = "ballerina", name = "jballerina.java"} +] + +[[package]] +org = "ballerina" +name = "lang.value" +version = "0.0.0" +scope = "testOnly" +dependencies = [ + {org = "ballerina", name = "jballerina.java"} +] + +[[package]] +org = "ballerina" +name = "test" +version = "0.0.0" +scope = "testOnly" +dependencies = [ + {org = "ballerina", name = "jballerina.java"}, + {org = "ballerina", name = "lang.error"} +] +modules = [ + {org = "ballerina", packageName = "test", moduleName = "test"} +] + diff --git a/ballerina/Package.md b/ballerina/Package.md new file mode 100644 index 0000000..b5158ce --- /dev/null +++ b/ballerina/Package.md @@ -0,0 +1,213 @@ +# Ballerina JSON Data Library + +The Ballerina JSON Data Library is a comprehensive toolkit designed to facilitate the handling and manipulation of JSON data within Ballerina applications. It streamlines the process of converting JSON data to native Ballerina data types, enabling developers to work with JSON content seamlessly and efficiently. + +## Features + +- **Versatile JSON Data Input**: Accept JSON data as a ballerina JSON value, a string, byte array, or a stream and convert it into a subtype of anydata. +- **JSON to anydata Value Conversion**: Transform JSON data into expected type which is subtype of anydata. +- **Projection Support**: Perform selective conversion of JSON data subsets into anydata values through projection. + +## Usage + +### Converting JSON Document value to a record value + +To convert an JSON document value to a record value, you can utilize the `parseAsType` function provided by the library. The example below showcases the transformation of an JSON document value into a record value. + +```ballerina +import ballerina/data.jsondata; +import ballerina/io; + +type Book record { + string name; + string author; + int year; +}; + +public function main() returns error? { + json jsonContent = { + "name": "Clean Code", + "author": "Robert C. Martin", + "year": 2008 + }; + + Book book = check jsondata:parseAsType(jsonContent); + io:println(book); +} +``` + +### Converting external JSON document to a record value + +For transforming JSON content from an external source into a record value, the `parseString`, `parseBytes`, `parseStream` functions can be used. This external source can be in the form of a string or a byte array/byte-block-stream that houses the JSON data. This is commonly extracted from files or network sockets. 
The example below demonstrates the conversion of an JSON value from an external source into a record value. + +```ballerina +import ballerina/data.jsondata; +import ballerina/io; + +type Book record { + string name; + string author; + int year; +}; + +public function main() returns error? { + string jsonContent = check io:fileReadString("path/to/file.json"); + Book book = check jsondata:parseString(jsonContent); + io:println(book); +} +``` + +Make sure to handle possible errors that may arise during the file reading or JSON to anydata conversion process. The `check` keyword is utilized to handle these errors, but more sophisticated error handling can be implemented as per your requirements. + +## JSON to anydata representation + +The conversion of JSON data to subtype of anydata representation is a fundamental feature of the library. + +### JSON Object + +The JSON Object can be represented as a value of type record/map in Ballerina which facilitates a structured and type-safe approach to handling JSON data. + +Take for instance the following JSON Object snippet: + +```json +{ + "author": "Robert C. Martin", + "books": [ + { + "name": "Clean Code", + "year": 2008 + }, + { + "name": "Clean Architecture", + "year": 2017 + } + ] +} +``` + +This JSON Object can be represented as a record value in Ballerina as follows: + +```ballerina +type Author record { + string author; + Book[] books; +}; + +type Book record { + string name; + int year; +}; + +public function main() returns error? { + json jsonContent = { + "author": "Robert C. Martin", + "books": [ + { + "name": "Clean Code", + "year": 2008 + }, + { + "name": "Clean Architecture", + "year": 2017 + } + ] + }; + + Author author = check jsondata:parseAsType(jsonContent); + io:println(author); +} +``` + +### JSON Array + +The JSON Array can be represented as an array/tuple values in Ballerina. + +```json +[ + { + "name": "Clean Code", + "year": 2008 + }, + { + "name": "Clean Architecture", + "year": 2017 + } +] +``` + +This JSON Array can be converted as an array/tuple in Ballerina as follows: + +```ballerina +type Book record { + string name; + int year; +}; + +public function main() returns error? { + json jsonContent = [ + { + "name": "Clean Code", + "year": 2008 + }, + { + "name": "Clean Architecture", + "year": 2017 + } + ]; + + Book[] bookArr = check jsondata:parseAsType(jsonContent); + io:println(bookArr); + + [Book, Book] bookTuple = check jsondata:parseAsType(jsonContent); + io:println(bookTuple); +} +``` + +### Controlling the JSON to record conversion + +The library allows for selective conversion of JSON into closed records. This is beneficial when the JSON data contains members that are not necessary to be transformed into record fields. + +```json +{ + "name": "Clean Code", + "author": "Robert C. Martin", + "year": 2008, + "publisher": "Prentice Hall" +} +``` + +The JSON data above contains `publisher` and `year` fields which are not required to be converted into a record field. + +```ballerina +type Book record {| + string name; + string author; +|}; + +public function main() returns error? { + json jsonContent = { + "name": "Clean Code", + "author": "Robert C. 
Martin", + "year": 2008, + "publisher": "Prentice Hall" + }; + + Book book = check jsondata:parseAsType(jsonContent); + io:println(book); +} +``` + +However, if the rest field is utilized (or if the record type is defined as an open record), all members in the JSON data will be transformed into record fields: + +```ballerina +type Book record { + string name; + string author; +} +``` + +In this instance, all other members in the JSON data, such as `year` and `publisher` will be transformed into `anydata-typed` fields with the corresponding JSON object member as the key-value pair. + +This behavior extends to arrays as well. + +The process of projecting JSON data into a record supports various use cases, including the filtering out of unnecessary members. This functionality is anticipated to be enhanced in the future to accommodate more complex scenarios, such as filtering values based on regular expressions, among others. diff --git a/ballerina/build.gradle b/ballerina/build.gradle new file mode 100644 index 0000000..ed5c3fc --- /dev/null +++ b/ballerina/build.gradle @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import org.apache.tools.ant.taskdefs.condition.Os + +buildscript { + repositories { + maven { + url = 'https://maven.pkg.github.com/ballerina-platform/plugin-gradle' + credentials { + username System.getenv("packageUser") + password System.getenv("packagePAT") + } + } + } + dependencies { + classpath "io.ballerina:plugin-gradle:${project.ballerinaGradlePluginVersion}" + } +} + +description = 'Ballerina - Data JSON Module' + +def packageName = "data.jsondata" +def packageOrg = "ballerina" + +def tomlVersion = stripBallerinaExtensionVersion("${project.version}") +def ballerinaTomlFilePlaceHolder = new File("${project.rootDir}/build-config/resources/Ballerina.toml") +def ballerinaTomlFile = new File("$project.projectDir/Ballerina.toml") +def compilerPluginTomlFilePlaceHolder = new File("${project.rootDir}/build-config/resources/CompilerPlugin.toml") +def compilerPluginTomlFile = new File("$project.projectDir/CompilerPlugin.toml") + +def stripBallerinaExtensionVersion(String extVersion) { + if (extVersion.matches(project.ext.timestampedVersionRegex)) { + def splitVersion = extVersion.split('-'); + if (splitVersion.length > 3) { + def strippedValues = splitVersion[0..-4] + return strippedValues.join('-') + } else { + return extVersion + } + } else { + return extVersion.replace("${project.ext.snapshotVersion}", "") + } +} + +apply plugin: 'io.ballerina.plugin' + +ballerina { + packageOrganization = packageOrg + module = packageName + langVersion = ballerinaLangVersion +} + +task updateTomlFiles { + doLast { + def newConfig = ballerinaTomlFilePlaceHolder.text.replace("@project.version@", project.version) + newConfig = newConfig.replace("@toml.version@", tomlVersion) + ballerinaTomlFile.text = newConfig + + def newCompilerPluginToml = 
compilerPluginTomlFilePlaceHolder.text.replace("@project.version@", project.version) + compilerPluginTomlFile.text = newCompilerPluginToml + } +} + +task commitTomlFiles { + doLast { + project.exec { + ignoreExitValue true + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + commandLine 'cmd', '/c', "git commit Ballerina.toml Dependencies.toml CompilerPlugin.toml -m '[Automated] Update the native jar versions'" + } else { + commandLine 'sh', '-c', "git commit Ballerina.toml Dependencies.toml CompilerPlugin.toml -m '[Automated] Update the native jar versions'" + } + } + } +} + +publishing { + publications { + maven(MavenPublication) { + artifact source: createArtifactZip, extension: 'zip' + } + } + + repositories { + maven { + name = "GitHubPackages" + url = uri("https://maven.pkg.github.com/ballerina-platform/module-${packageOrg}-${packageName}") + credentials { + username = System.getenv("publishUser") + password = System.getenv("publishPAT") + } + } + } +} + +task deleteDependencyTomlFiles { + if (project.hasProperty("deleteDependencies")) { + delete "${project.projectDir}/Dependencies.toml" + } +} + +updateTomlFiles.dependsOn copyStdlibs + +build.dependsOn "generatePomFileForMavenPublication" +build.dependsOn ":${packageName}-native:build" +build.dependsOn ":${packageName}-compiler-plugin:build" +build.dependsOn deleteDependencyTomlFiles + +test.dependsOn ":${packageName}-native:build" +test.dependsOn ":${packageName}-compiler-plugin:build" diff --git a/ballerina/init.bal b/ballerina/init.bal new file mode 100644 index 0000000..5476d5e --- /dev/null +++ b/ballerina/init.bal @@ -0,0 +1,25 @@ +// Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). +// +// WSO2 LLC. licenses this file to you under the Apache License, +// Version 2.0 (the "License"); you may not use this file except +// in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import ballerina/jballerina.java; + +isolated function init() { + setModule(); +} + +isolated function setModule() = @java:Method { + 'class: "io.ballerina.lib.data.jsondata.utils.ModuleUtils" +} external; diff --git a/ballerina/json_api.bal b/ballerina/json_api.bal new file mode 100644 index 0000000..b6cf13b --- /dev/null +++ b/ballerina/json_api.bal @@ -0,0 +1,81 @@ +// Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). +// +// WSO2 LLC. licenses this file to you under the Apache License, +// Version 2.0 (the "License"); you may not use this file except +// in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import ballerina/jballerina.java; + +# Convert value of type `json` to subtype of `anydata`. 
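+# For example, `Book book = check jsondata:parseAsType(jsonContent);` converts a `json` value to a
+# caller-defined `Book` record (illustrative usage; `Book` is an assumed user-defined record type).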
+# +# + v - Source JSON value +# + options - Options to be used for filtering in the projection +# + t - Target type +# + return - On success, returns value belonging to the given target type, else returns an `jsondata:Error` value. +public isolated function parseAsType(json v, Options options = {}, typedesc t = <>) + returns t|Error = @java:Method {'class: "io.ballerina.lib.data.jsondata.json.Native"} external; + +# Converts JSON string to subtype of anydata. +# +# + s - Source JSON string value or byte[] or byte-block-stream +# + options - Options to be used for filtering in the projection +# + t - Target type +# + return - On success, value belonging to the given target type, else returns an `jsondata:Error` value. +public isolated function parseString(string s, Options options = {}, typedesc t = <>) + returns t|Error = @java:Method {'class: "io.ballerina.lib.data.jsondata.json.Native"} external; + +# Converts JSON byte[] to subtype of anydata. +# +# + s - Source JSON byte[] +# + options - Options to be used for filtering in the projection +# + t - Target type +# + return - On success, value belonging to the given target type, else returns an `jsondata:Error` value. +public isolated function parseBytes(byte[] s, Options options = {}, typedesc t = <>) + returns t|Error = @java:Method {'class: "io.ballerina.lib.data.jsondata.json.Native"} external; + +# Converts JSON byte-block-stream to subtype of anydata. +# +# + s - Source JSON byte-block-stream +# + options - Options to be used for filtering in the projection +# + t - Target type +# + return - On success, value belonging to the given target type, else returns an `jsondata:Error` value. +public isolated function parseStream(stream s, Options options = {}, typedesc t = <>) + returns t|Error = @java:Method {'class: "io.ballerina.lib.data.jsondata.json.Native"} external; + +# Converts a value of type `anydata` to `json`. +# +# + v - Source anydata value +# + return - representation of `v` as value of type json +public isolated function toJson(anydata v) + returns json|Error = @java:Method {'class: "io.ballerina.lib.data.jsondata.json.Native"} external; + +# Represent the options that can be used to modify the behaviour of the projection. +# +# + allowDataProjection - enable or disable projection +public type Options record {| + boolean allowDataProjection = true; +|}; + +# Represents the error type of the ballerina/data.jsondata module. This error type represents any error that can occur +# during the execution of jsondata APIs. +public type Error distinct error; + +# Defines the name of the JSON Object key. +# +# + value - The name of the JSON Object key +public type NameConfig record {| + string value; +|}; + +# The annotation is used to overwrite the existing record field name. +public const annotation NameConfig Name on record field; diff --git a/ballerina/tests/from_json_string_test.bal b/ballerina/tests/from_json_string_test.bal new file mode 100644 index 0000000..b0d5eae --- /dev/null +++ b/ballerina/tests/from_json_string_test.bal @@ -0,0 +1,1560 @@ +// Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). +// +// WSO2 LLC. licenses this file to you under the Apache License, +// Version 2.0 (the "License"); you may not use this file except +// in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import ballerina/test; + +@test:Config { + dataProvider: basicTypeDataProviderForParseString +} +isolated function testJsonStringToBasicTypes(string sourceData, typedesc expType, + anydata expectedData) returns Error? { + anydata val1 = check parseString(sourceData, {}, expType); + test:assertEquals(val1, expectedData); +} + +function basicTypeDataProviderForParseString() returns [string, typedesc, anydata][] { + return [ + ["5", int, 5], + ["5.5", float, 5.5], + ["5.5", decimal, 5.5d], + ["hello", string, "hello"], + ["true", boolean, true] + ]; +} + +@test:Config +isolated function testNilAsExpectedTypeWithParseString() returns error? { + () val = check parseString("null"); + test:assertEquals(val, null); +} + +@test:Config +isolated function testSimpleJsonStringToRecord() returns Error? { + string j = string `{"a": "hello", "b": 1}`; + + SimpleRec1 recA = check parseString(j); + test:assertEquals(recA.a, "hello"); + test:assertEquals(recA.b, 1); + + SimpleRec2 recB = check parseString(j); + test:assertEquals(recB.a, "hello"); + test:assertEquals(recB.b, 1); + + OpenRecord recC = check parseString(j); + test:assertEquals(recC.get("a"), "hello"); + test:assertEquals(recC.get("b"), 1); +} + +@test:Config +isolated function testSimpleJsonStringToRecordWithProjection() returns Error? { + string str = string `{"a": "hello", "b": 1}`; + + record {|string a;|} recA = check parseString(str); + test:assertEquals(recA.length(), 1); + test:assertEquals(recA.a, "hello"); + test:assertEquals(recA, {"a": "hello"}); +} + +@test:Config +isolated function testNestedJsonStringToRecord() returns Error? { + string str = string `{ + "a": "hello", + "b": 1, + "c": { + "d": "world", + "e": 2 + } + }`; + + NestedRecord1 recA = check parseString(str); + test:assertEquals(recA.length(), 3); + test:assertEquals(recA.a, "hello"); + test:assertEquals(recA.b, 1); + test:assertEquals(recA.c.length(), 2); + test:assertEquals(recA.c.d, "world"); + test:assertEquals(recA.c.e, 2); + + NestedRecord2 recB = check parseString(str); + test:assertEquals(recB.length(), 3); + test:assertEquals(recB.a, "hello"); + test:assertEquals(recB.b, 1); + test:assertEquals(recB.c.length(), 2); + test:assertEquals(recB.c.d, "world"); + test:assertEquals(recB.c.e, 2); + + OpenRecord recC = check parseString(str); + test:assertEquals(recC.get("a"), "hello"); + test:assertEquals(recC.get("b"), 1); + test:assertEquals(recC.get("c"), {d: "world", e: 2}); +} + +@test:Config +isolated function testNestedJsonStringToRecordWithProjection() returns Error? { + string str = string `{ + "a": "hello", + "b": 1, + "c": { + "d": "world", + "e": 2 + } + }`; + + record {|string a; record {|string d;|} c;|} recA = check parseString(str); + test:assertEquals(recA.a, "hello"); + test:assertEquals(recA.c.d, "world"); + test:assertEquals(recA, {"a": "hello", "c": {"d": "world"}}); +} + +@test:Config +isolated function testJsonStringToRecordWithOptionalFields() returns Error? 
{ + string str = string `{"a": "hello"}`; + + record {|string a; int b?;|} recA = check parseString(str); + test:assertEquals(recA.length(), 1); + test:assertEquals(recA.a, "hello"); + test:assertEquals(recA.b, null); +} + +@test:Config +isolated function testJsonStringToRecordWithOptionalFieldsWithProjection() returns Error? { + string str = string `{ + "a": "hello", + "b": 1, + "c": { + "d": "world", + "e": 2 + } + }`; + + record {|string a; record {|string d; int f?;|} c;|} recA = check parseString(str); + test:assertEquals(recA.a, "hello"); + test:assertEquals(recA.c.d, "world"); + test:assertEquals(recA, {"a": "hello", "c": {"d": "world"}}); +} + +@test:Config +isolated function testParseString1() returns Error? { + string str = string `{ + "id": 2, + "name": "Anne", + "address": { + "street": "Main", + "city": "94" + } + }`; + + R x = check parseString(str); + test:assertEquals(x.id, 2); + test:assertEquals(x.name, "Anne"); + test:assertEquals(x.address.street, "Main"); + test:assertEquals(x.address.city, "94"); +} + +@test:Config +isolated function testMapTypeAsFieldTypeInRecordForJsonString() returns Error? { + string str = string `{ + "employees": { + "John": "Manager", + "Anne": "Developer" + } + }`; + + Company x = check parseString(str); + test:assertEquals(x.employees["John"], "Manager"); + test:assertEquals(x.employees["Anne"], "Developer"); +} + +@test:Config +isolated function testParseString2() returns Error? { + string str = string `{ + "name": "John", + "age": 30, + "address": { + "street": "123 Main St", + "zipcode": 10001, + "coordinates": { + "latitude": 40.7128, + "longitude": -74.0060 + } + } + }`; + + Person x = check parseString(str); + test:assertEquals(x.length(), 3); + test:assertEquals(x.name, "John"); + test:assertEquals(x.age, 30); + test:assertEquals(x.address.length(), 3); + test:assertEquals(x.address.street, "123 Main St"); + test:assertEquals(x.address.zipcode, 10001); + test:assertEquals(x.address.coordinates.length(), 2); + test:assertEquals(x.address.coordinates.latitude, 40.7128); + test:assertEquals(x.address.coordinates.longitude, -74.0060); +} + +@test:Config +isolated function testParseString3() returns Error? { + string str = string `{ + "title": "To Kill a Mockingbird", + "author": { + "name": "Harper Lee", + "birthdate": "1926-04-28", + "hometown": "Monroeville, Alabama", + "local": false + }, + "price": 10.5, + "publisher": { + "name": "J. B. Lippincott & Co.", + "year": 1960, + "location": "Philadelphia", + "month": 4 + } + }`; + + Book x = check parseString(str); + test:assertEquals(x.title, "To Kill a Mockingbird"); + test:assertEquals(x.author.name, "Harper Lee"); + test:assertEquals(x.author.birthdate, "1926-04-28"); + test:assertEquals(x.author.hometown, "Monroeville, Alabama"); + test:assertEquals(x.publisher.name, "J. B. Lippincott & Co."); + test:assertEquals(x.publisher.year, 1960); + test:assertEquals(x.publisher["location"], "Philadelphia"); + test:assertEquals(x["price"], 10.5); + test:assertEquals(x.author["local"], false); +} + +@test:Config +isolated function testParseString4() returns Error? 
{ + string str = string `{ + "name": "School Twelve", + "city": 23, + "number": 12, + "section": 2, + "flag": true, + "tp": 12345 + }`; + + School x = check parseString(str); + test:assertEquals(x.length(), 6); + test:assertEquals(x.name, "School Twelve"); + test:assertEquals(x.number, 12); + test:assertEquals(x.flag, true); + test:assertEquals(x["section"], 2); + test:assertEquals(x["tp"], 12345); +} + +@test:Config +isolated function testParseString5() returns Error? { + string str = string `{ + "intValue": 10, + "floatValue": 10.5, + "stringValue": "test", + "decimalValue": 10.50, + "doNotParse": "abc" + }`; + + TestRecord x = check parseString(str); + test:assertEquals(x.length(), 5); + test:assertEquals(x.intValue, 10); + test:assertEquals(x.floatValue, 10.5f); + test:assertEquals(x.stringValue, "test"); + test:assertEquals(x.decimalValue, 10.50d); + test:assertEquals(x["doNotParse"], "abc"); +} + +@test:Config +isolated function testParseString6() returns Error? { + string str = string `{ + "id": 1, + "name": "Class A", + "student": { + "id": 2, + "name": "John Doe", + "school": { + "name": "ABC School", + "address": { + "street": "Main St", + "city": "New York" + } + } + }, + "teacher": { + "id": 3, + "name": "Jane Smith" + }, + "monitor": null + }`; + + Class x = check parseString(str); + test:assertEquals(x.length(), 5); + test:assertEquals(x.id, 1); + test:assertEquals(x.name, "Class A"); + test:assertEquals(x.student.length(), 3); + test:assertEquals(x.student.id, 2); + test:assertEquals(x.student.name, "John Doe"); + test:assertEquals(x.student.school.length(), 2); + test:assertEquals(x.student.school.name, "ABC School"); + test:assertEquals(x.student.school.address.length(), 2); + test:assertEquals(x.student.school.address.street, "Main St"); + test:assertEquals(x.student.school.address.city, "New York"); + test:assertEquals(x.teacher.length(), 2); + test:assertEquals(x.teacher.id, 3); + test:assertEquals(x.teacher.name, "Jane Smith"); + test:assertEquals(x.monitor, null); +} + +@test:Config +isolated function testParseString7() returns Error? { + string nestedJsonStr = string `{ + "intValue": 5, + "floatValue": 2.5, + "stringValue": "nested", + "decimalValue": 5.00 + }`; + + string str = string `{ + "intValue": 10, + "nested1": ${nestedJsonStr} + }`; + + TestRecord2 x = check parseString(str); + test:assertEquals(x.length(), 2); + test:assertEquals(x.intValue, 10); + test:assertEquals(x.nested1.length(), 4); + test:assertEquals(x.nested1.intValue, 5); +} + +@test:Config +isolated function testParseString8() returns Error? { + string str = string `{ + "street": "Main", + "city": "Mahar", + "house": 94 + }`; + + TestR x = check parseString(str); + test:assertEquals(x.street, "Main"); + test:assertEquals(x.city, "Mahar"); +} + +@test:Config +isolated function testParseString9() returns Error? { + string str = string `{ + "street": "Main", + "city": "Mahar", + "houses": [94, 95, 96] + }`; + + TestArr1 x = check parseString(str); + test:assertEquals(x.length(), 3); + test:assertEquals(x.street, "Main"); + test:assertEquals(x.city, "Mahar"); + test:assertEquals(x.houses, [94, 95, 96]); +} + +@test:Config +isolated function testParseString10() returns Error? 
{ + string str = string `{ + "street": "Main", + "city": 11, + "house": [94, "Gedara"] + }`; + + TestArr2 x = check parseString(str); + test:assertEquals(x.length(), 3); + test:assertEquals(x.street, "Main"); + test:assertEquals(x.city, 11); + test:assertEquals(x.house, [94, "Gedara"]); +} + +@test:Config +isolated function testParseString11() returns Error? { + string str = string `{ + "street": "Main", + "city": "Mahar", + "house": [94, [1, 2, 3]] + }`; + + TestArr3 x = check parseString(str); + test:assertEquals(x.length(), 3); + test:assertEquals(x.street, "Main"); + test:assertEquals(x.city, "Mahar"); + test:assertEquals(x.house, [94, [1, 2, 3]]); +} + +@test:Config +isolated function testParseString12() returns Error? { + string str = string `{ + "street": "Main", + "city": { + "name": "Mahar", + "code": 94 + }, + "flag": true + }`; + + TestJson x = check parseString(str); + test:assertEquals(x.length(), 3); + test:assertEquals(x.street, "Main"); + test:assertEquals(x.city, {"name": "Mahar", "code": 94}); +} + +@test:Config +isolated function testParseString13() returns Error? { + string str = string `{ + "street": "Main", + "city": "Mahar", + "house": [94, [1, 3, "4"]] + }`; + + TestArr3 x = check parseString(str); + test:assertEquals(x.length(), 3); + test:assertEquals(x.street, "Main"); + test:assertEquals(x.city, "Mahar"); + test:assertEquals(x.house, [94, [1, 3, 4]]); +} + +@test:Config +isolated function testParseString14() returns Error? { + string str = string `{ + "id": 12, + "name": "Anne", + "address": { + "id": 34, + "city": "94", + "street": "York road" + } + }`; + + RN x = check parseString(str); + test:assertEquals(x.length(), 3); + test:assertEquals(x.id, 12); + test:assertEquals(x.name, "Anne"); + test:assertEquals(x.address.length(), 3); + test:assertEquals(x.address.id, 34); + test:assertEquals(x.address.city, "94"); + test:assertEquals(x.address.street, "York road"); +} + +@test:Config +isolated function testParseString15() returns Error? { + string str = string `[1, 2, 3]`; + + IntArr x = check parseString(str); + test:assertEquals(x, [1, 2, 3]); +} + +@test:Config +isolated function testParseString16() returns Error? { + string str = string `[1, "abc", [3, 4.0]]`; + + Tuple x = check parseString(str); + test:assertEquals(x, [1, "abc", [3, 4.0]]); +} + +@test:Config +isolated function testParseString17() returns Error? { + string str = string `{ + "street": "Main", + "city": { + "name": "Mahar", + "code": 94, + "internal": { + "id": 12, + "agent": "Anne" + } + }, + "flag": true + }`; + + TestJson x = check parseString(str); + test:assertEquals(x.length(), 3); + test:assertEquals(x.street, "Main"); + test:assertEquals(x.city, {"name": "Mahar", "code": 94, "internal": {"id": 12, "agent": "Anne"}}); +} + +@test:Config +isolated function testParseString18() returns Error? { + string str = string `{ + "books": [ + { + "title": "The Great Gatsby", + "author": "F. Scott Fitzgerald" + }, + { + "title": "The Grapes of Wrath", + "author": "John Steinbeck" + }, + { + "title": "Binary Echoes: Unraveling the Digital Web", + "author": "Alexandra Quinn" + } + ] + }`; + + Library x = check parseString(str); + test:assertEquals(x.books.length(), 2); + test:assertEquals(x.books[0].title, "The Great Gatsby"); + test:assertEquals(x.books[0].author, "F. 
Scott Fitzgerald"); + test:assertEquals(x.books[1].title, "The Grapes of Wrath"); + test:assertEquals(x.books[1].author, "John Steinbeck"); +} + +type LibraryB record { + [BookA, BookA] books; +}; + +type LibraryC record {| + [BookA, BookA...] books; +|}; + +@test:Config +isolated function testParseString19() returns Error? { + string str = string `{ + "books": [ + { + "title": "The Great Gatsby", + "author": "F. Scott Fitzgerald" + }, + { + "title": "The Grapes of Wrath", + "author": "John Steinbeck" + }, + { + "title": "Binary Echoes: Unraveling the Digital Web", + "author": "Alexandra Quinn" + } + ] + }`; + + LibraryB x = check parseString(str); + test:assertEquals(x.books.length(), 2); + test:assertEquals(x.books[0].title, "The Great Gatsby"); + test:assertEquals(x.books[0].author, "F. Scott Fitzgerald"); + test:assertEquals(x.books[1].title, "The Grapes of Wrath"); + test:assertEquals(x.books[1].author, "John Steinbeck"); + + LibraryC y = check parseString(str); + test:assertEquals(y.books.length(), 3); + test:assertEquals(y.books[0].title, "The Great Gatsby"); + test:assertEquals(y.books[0].author, "F. Scott Fitzgerald"); + test:assertEquals(y.books[1].title, "The Grapes of Wrath"); + test:assertEquals(y.books[1].author, "John Steinbeck"); + test:assertEquals(y.books[2].title, "Binary Echoes: Unraveling the Digital Web"); + test:assertEquals(y.books[2].author, "Alexandra Quinn"); +} + +@test:Config +isolated function testParseString20() returns Error? { + string str1 = string `{ + "a": { + "c": "world", + "d": "2" + }, + "b": { + "c": "world", + "d": "2" + } + }`; + + record {| + record {| + string c; + string d; + |}...; + |} val1 = check parseString(str1); + test:assertEquals(val1.length(), 2); + test:assertEquals(val1["a"]["c"], "world"); + test:assertEquals(val1["a"]["d"], "2"); + test:assertEquals(val1["b"]["c"], "world"); + test:assertEquals(val1["b"]["d"], "2"); + + record {| + map...; + |} val2 = check parseString(str1); + test:assertEquals(val2.length(), 2); + test:assertEquals(val2["a"]["c"], "world"); + test:assertEquals(val2["a"]["d"], "2"); + test:assertEquals(val2["b"]["c"], "world"); + test:assertEquals(val2["b"]["d"], "2"); + + string str3 = string `{ + "a": [{ + "c": "world", + "d": "2" + }], + "b": [{ + "c": "world", + "d": "2" + }] + }`; + + record {| + record {| + string c; + string d; + |}[]...; + |} val3 = check parseString(str3); + test:assertEquals(val3.length(), 2); + test:assertEquals(val3["a"], [ + { + "c": "world", + "d": "2" + } + ]); + test:assertEquals(val3["b"], [ + { + "c": "world", + "d": "2" + } + ]); +} + +@test:Config +isolated function testUnionTypeAsExpTypeForParseString() returns Error? { + decimal|float val1 = check parseString("1.0"); + test:assertEquals(val1, 1.0); + + string str2 = string `{ + "a": "hello", + "b": 1 + }`; + + record {| + decimal|float b; + |} val2 = check parseString(str2); + test:assertEquals(val2.length(), 1); + test:assertEquals(val2.b, 1.0); + + string str3 = string `{ + "a": { + "b": 1, + "d": { + "e": "false" + } + }, + "c": 2 + }`; + + record {| + record {|decimal|int b; record {|string|boolean e;|} d;|} a; + decimal|float c; + |} val3 = check parseString(str3); + test:assertEquals(val3.length(), 2); + test:assertEquals(val3.a.length(), 2); + test:assertEquals(val3.a.b, 1); + test:assertEquals(val3.a.d.e, false); + test:assertEquals(val3.c, 2.0); +} + +@test:Config +isolated function testAnydataAsExpTypeForParseString() returns Error? 
{ + string jsonStr1 = string `1`; + anydata val1 = check parseString(jsonStr1); + test:assertEquals(val1, 1); + + string jsonStr2 = string `{ + "a": "hello", + "b": 1 + }`; + + anydata val2 = check parseString(jsonStr2); + test:assertEquals(val2, {"a": "hello", "b": 1}); + + string jsonStr3 = string `{ + "a": { + "b": 1, + "d": { + "e": "hello" + } + }, + "c": 2 + }`; + + anydata val3 = check parseString(jsonStr3); + test:assertEquals(val3, {"a": {"b": 1, "d": {"e": "hello"}}, "c": 2}); + + string jsonStr4 = string `{ + "a": [{ + "b": 1, + "d": { + "e": "hello" + } + }], + "c": 2 + }`; + + anydata val4 = check parseString(jsonStr4); + test:assertEquals(val4, {"a": [{"b": 1, "d": {"e": "hello"}}], "c": 2}); + + string str5 = string `[[1], 2]`; + anydata val5 = check parseString(str5); + test:assertEquals(val5, [[1], 2]); +} + +@test:Config +isolated function testJsonAsExpTypeForParseString() returns Error? { + string jsonStr1 = string `1`; + json val1 = check parseString(jsonStr1); + test:assertEquals(val1, 1); + + string jsonStr2 = string `{ + "a": "hello", + "b": 1 + }`; + + json val2 = check parseString(jsonStr2); + test:assertEquals(val2, {"a": "hello", "b": 1}); + + string jsonStr3 = string `{ + "a": { + "b": 1, + "d": { + "e": "hello" + } + }, + "c": 2 + }`; + + json val3 = check parseString(jsonStr3); + test:assertEquals(val3, {"a": {"b": 1, "d": {"e": "hello"}}, "c": 2}); + + string jsonStr4 = string `{ + "a": [{ + "b": 1, + "d": { + "e": "hello" + } + }], + "c": 2 + }`; + + json val4 = check parseString(jsonStr4); + test:assertEquals(val4, {"a": [{"b": 1, "d": {"e": "hello"}}], "c": 2}); + + string str5 = string `[[1], 2]`; + json val5 = check parseString(str5); + test:assertEquals(val5, [[1], 2]); +} + +@test:Config +isolated function testMapAsExpTypeForParseString() returns Error? { + string jsonStr1 = string `{ + "a": "hello", + "b": 1 + }`; + + map val1 = check parseString(jsonStr1); + test:assertEquals(val1, {"a": "hello", "b": "1"}); + + string jsonStr2 = string `{ + "a": "hello", + "b": 1, + "c": { + "d": "world", + "e": 2 + } + }`; + record {| + string a; + int b; + map c; + |} val2 = check parseString(jsonStr2); + test:assertEquals(val2.a, "hello"); + test:assertEquals(val2.b, 1); + test:assertEquals(val2.c, {"d": "world", "e": "2"}); + + string jsonStr3 = string `{ + "a": { + "c": "world", + "d": 2 + }, + "b": { + "c": "world", + "d": 2 + } + }`; + + map> val3 = check parseString(jsonStr3); + test:assertEquals(val3, {"a": {"c": "world", "d": "2"}, "b": {"c": "world", "d": "2"}}); + + record {| + map a; + |} val4 = check parseString(jsonStr3); + test:assertEquals(val4.a, {"c": "world", "d": "2"}); + + map val5 = check parseString(jsonStr3); + test:assertEquals(val5, {"a": {"c": "world", "d": 2}, "b": {"c": "world", "d": 2}}); + + string jsonStr6 = string `{ + "a": Kanth, + "b": { + "g": { + "c": "hello", + "d": 1 + }, + "h": { + "c": "world", + "d": 2 + } + } + }`; + record {| + string a; + map> b; + |} val6 = check parseString(jsonStr6); + test:assertEquals(val6.a, "Kanth"); + test:assertEquals(val6.b, {"g": {"c": "hello", "d": "1"}, "h": {"c": "world", "d": "2"}}); +} + +@test:Config +isolated function testProjectionInTupleForParseString() returns Error? 
{ + string str1 = string `[1, 2, 3, 4, 5, 8]`; + [string, float] val1 = check parseString(str1); + test:assertEquals(val1, ["1", 2.0]); + + string str2 = string `{ + "a": [1, 2, 3, 4, 5, 8] + }`; + record {|[string, float] a;|} val2 = check parseString(str2); + test:assertEquals(val2.a, ["1", 2.0]); + + string str3 = string `[1, "4"]`; + [float] val3 = check parseString(str3); + test:assertEquals(val3, [1.0]); + + string str4 = string `["1", {}]`; + [float] val4 = check parseString(str4); + test:assertEquals(val4, [1.0]); + + string str5 = string `["1", [], {"name": 1}]`; + [float] val5 = check parseString(str5); + test:assertEquals(val5, [1.0]); +} + +@test:Config +isolated function testProjectionInArrayForParseString() returns Error? { + string strVal = string `[1, 2, 3, 4, 5]`; + int[] val = check parseString(strVal); + test:assertEquals(val, [1, 2, 3, 4, 5]); + + string strVal2 = string `[1, 2, 3, 4, 5]`; + int[2] val2 = check parseString(strVal2); + test:assertEquals(val2, [1, 2]); + + string strVal3 = string `{ + "a": [1, 2, 3, 4, 5] + }`; + record {|int[2] a;|} val3 = check parseString(strVal3); + test:assertEquals(val3, {a: [1, 2]}); + + string strVal4 = string `{ + "a": [1, 2, 3, 4, 5], + "b": [1, 2, 3, 4, 5] + }`; + record {|int[2] a; int[3] b;|} val4 = check parseString(strVal4); + test:assertEquals(val4, {a: [1, 2], b: [1, 2, 3]}); + + string strVal5 = string `{ + "employees": [ + { "name": "Prakanth", + "age": 26 + }, + { "name": "Kevin", + "age": 25 + } + ] + }`; + record {|record {|string name; int age;|}[1] employees;|} val5 = check parseString(strVal5); + test:assertEquals(val5, {employees: [{name: "Prakanth", age: 26}]}); + + string strVal6 = string `["1", 2, 3, { "a" : val_a }]`; + int[3] val6 = check parseString(strVal6); + test:assertEquals(val6, [1, 2, 3]); +} + +@test:Config +isolated function testProjectionInRecordForParseString() returns Error? { + string jsonStr1 = string `{"name": "John", "age": 30, "city": "New York"}`; + record {|string name; string city;|} val1 = check parseString(jsonStr1); + test:assertEquals(val1, {name: "John", city: "New York"}); + + string jsonStr2 = string `{"name": John, "age": "30", "city": "New York"}`; + record {|string name; string city;|} val2 = check parseString(jsonStr2); + test:assertEquals(val2, {name: "John", city: "New York"}); + + string jsonStr3 = string `{ "name": "John", + "company": { + "name": "wso2", + "year": 2024, + "addrees": { + "street": "123", + "city": "Berkeley" + } + }, + "city": "New York" }`; + record {|string name; string city;|} val3 = check parseString(jsonStr3); + test:assertEquals(val3, {name: "John", city: "New York"}); + + string jsonStr4 = string `{ "name": "John", + "company": [{ + "name": "wso2", + "year": 2024, + "addrees": { + "street": "123", + "city": "Berkeley" + } + }], + "city": "New York" }`; + record {|string name; string city;|} val4 = check parseString(jsonStr4); + test:assertEquals(val4, {name: "John", city: "New York"}); + + string jsonStr5 = string `{ "name": "John", + "company1": [{ + "name": "wso2", + "year": 2024, + "addrees": { + "street": "123", + "city": "Berkeley" + } + }], + "city": "New York", + "company2": [{ + "name": "amzn", + "year": 2024, + "addrees": { + "street": "123", + "city": "Miami" + } + }] + }`; + record {|string name; string city;|} val5 = check parseString(jsonStr5); + test:assertEquals(val5, {name: "John", city: "New York"}); +} + +@test:Config +isolated function testArrayOrTupleCaseForParseString() returns Error? 
{ + string jsonStr1 = string `[["1"], 2.0]`; + [[int], float] val1 = check parseString(jsonStr1); + test:assertEquals(val1, [[1], 2.0]); + + string jsonStr2 = string `[["1", 2], 2.0]`; + [[int, float], string] val2 = check parseString(jsonStr2); + test:assertEquals(val2, [[1, 2.0], "2.0"]); + + string jsonStr3 = string `[["1", 2], [2, "3"]]`; + int[][] val3 = check parseString(jsonStr3); + test:assertEquals(val3, [[1, 2], [2, 3]]); + + string jsonStr4 = string `{"val" : [[1, 2], "2.0", 3.0, [5, 6]]}`; + record {| + [[int, float], string, float, [string, int]] val; + |} val4 = check parseString(jsonStr4); + test:assertEquals(val4, {val: [[1, 2.0], "2.0", 3.0, ["5", 6]]}); + + string jsonStr41 = string `{"val1" : [[1, 2], "2.0", 3.0, [5, 6]], "val2" : [[1, 2], "2.0", 3.0, [5, 6]]}`; + record {| + [[int, float], string, float, [string, int]] val1; + [[float, float], string, float, [string, float]] val2; + |} val41 = check parseString(jsonStr41); + test:assertEquals(val41, {val1: [[1, 2.0], "2.0", 3.0, ["5", 6]], val2: [[1.0, 2.0], "2.0", 3.0, ["5", 6.0]]}); + + string jsonStr5 = string `{"val" : [["1", 2], [2, "3"]]}`; + record {| + int[][] val; + |} val5 = check parseString(jsonStr5); + test:assertEquals(val5, {val: [[1, 2], [2, 3]]}); + + string jsonStr6 = string `[{"val" : [["1", 2], [2, "3"]]}]`; + [record {|int[][] val;|}] val6 = check parseString(jsonStr6); + test:assertEquals(val6, [{val: [[1, 2], [2, 3]]}]); +} + +@test:Config +isolated function testListFillerValuesWithParseString() returns Error? { + int[2] jsonVal1 = check parseString("[1]"); + test:assertEquals(jsonVal1, [1, 0]); + + [int, float, string, boolean] jsonVal2 = check parseString("[1]"); + test:assertEquals(jsonVal2, [1, 0.0, "", false]); + + record {| + float[3] A; + [int, decimal, float, boolean] B; + |} jsonVal3 = check parseString(string `{"A": [1], "B": [1]}`); + test:assertEquals(jsonVal3, {A: [1.0, 0.0, 0.0], B: [1, 0d, 0.0, false]}); +} + +@test:Config +isolated function testSingletonAsExpectedTypeForParseString() returns Error? { + "1" val1 = check parseString("1"); + test:assertEquals(val1, "1"); + + Singleton1 val2 = check parseString("1"); + test:assertEquals(val2, 1); + + SingletonUnion val3 = check parseString("1"); + test:assertEquals(val3, 1); + + () val4 = check parseString("null"); + test:assertEquals(val4, ()); + + string str5 = string `{ + "value": "1", + "id": "3" + }`; + SingletonInRecord val5 = check parseString(str5); + test:assertEquals(val5.id, "3"); + test:assertEquals(val5.value, 1); +} + +@test:Config +function testDuplicateKeyInTheStringSource() returns Error? { + string str = string `{ + "id": 1, + "name": "Anne", + "id": 2 + }`; + + record { + int id; + string name; + } employee = check parseString(str); + test:assertEquals(employee.length(), 2); + test:assertEquals(employee.id, 2); + test:assertEquals(employee.name, "Anne"); +} + +@test:Config +function testNameAnnotationWithParseString() returns Error? { + string jsonStr = string `{ + "id": 1, + "title-name": "Harry Potter", + "author-name": "J.K. Rowling" + }`; + + Book2 book = check parseString(jsonStr); + test:assertEquals(book.id, 1); + test:assertEquals(book.title, "Harry Potter"); + test:assertEquals(book.author, "J.K. Rowling"); +} + +@test:Config +isolated function testByteAsExpectedTypeForParseString() returns Error? 
{ + byte val1 = check parseString("1"); + test:assertEquals(val1, 1); + + [byte, int] val2 = check parseString("[255, 2000]"); + test:assertEquals(val2, [255, 2000]); + + string str4 = string `{ + "id": 1, + "name": "Anne", + "address": { + "street": "Main", + "city": "94", + "id": 2 + } + }`; + + record { + byte id; + string name; + record { + string street; + string city; + byte id; + } address; + } val4 = check parseString(str4); + test:assertEquals(val4.length(), 3); + test:assertEquals(val4.id, 1); + test:assertEquals(val4.name, "Anne"); + test:assertEquals(val4.address.length(), 3); + test:assertEquals(val4.address.street, "Main"); + test:assertEquals(val4.address.city, "94"); + test:assertEquals(val4.address.id, 2); +} + +@test:Config +isolated function testSignedInt8AsExpectedTypeForParseString() returns Error? { + int:Signed8 val1 = check parseString("-128"); + test:assertEquals(val1, -128); + + int:Signed8 val2 = check parseString("127"); + test:assertEquals(val2, 127); + + [int:Signed8, int] val3 = check parseString("[127, 2000]"); + test:assertEquals(val3, [127, 2000]); + + string str4 = string `{ + "id": 100, + "name": "Anne", + "address": { + "street": "Main", + "city": "94", + "id": -2 + } + }`; + + record { + int:Signed8 id; + string name; + record { + string street; + string city; + int:Signed8 id; + } address; + } val4 = check parseString(str4); + test:assertEquals(val4.length(), 3); + test:assertEquals(val4.id, 100); + test:assertEquals(val4.name, "Anne"); + test:assertEquals(val4.address.length(), 3); + test:assertEquals(val4.address.street, "Main"); + test:assertEquals(val4.address.city, "94"); + test:assertEquals(val4.address.id, -2); +} + +@test:Config +isolated function testSignedInt16AsExpectedTypeForParseString() returns Error? { + int:Signed16 val1 = check parseString("-32768"); + test:assertEquals(val1, -32768); + + int:Signed16 val2 = check parseString("32767"); + test:assertEquals(val2, 32767); + + [int:Signed16, int] val3 = check parseString("[32767, -324234]"); + test:assertEquals(val3, [32767, -324234]); + + string str4 = string `{ + "id": 100, + "name": "Anne", + "address": { + "street": "Main", + "city": "94", + "id": -2 + } + }`; + + record { + int:Signed16 id; + string name; + record { + string street; + string city; + int:Signed16 id; + } address; + } val4 = check parseString(str4); + test:assertEquals(val4.length(), 3); + test:assertEquals(val4.id, 100); + test:assertEquals(val4.name, "Anne"); + test:assertEquals(val4.address.length(), 3); + test:assertEquals(val4.address.street, "Main"); + test:assertEquals(val4.address.city, "94"); + test:assertEquals(val4.address.id, -2); +} + +@test:Config +isolated function testSignedInt32AsExpectedTypeForParseString() returns Error? 
{ + int:Signed32 val1 = check parseString("-2147483648"); + test:assertEquals(val1, -2147483648); + + int:Signed32 val2 = check parseString("2147483647"); + test:assertEquals(val2, 2147483647); + + int:Signed32[] val3 = check parseString("[2147483647, -2147483648]"); + test:assertEquals(val3, [2147483647, -2147483648]); + + string str4 = string `{ + "id": 2147483647, + "name": "Anne", + "address": { + "street": "Main", + "city": "94", + "id": -2147483648 + } + }`; + + record { + int:Signed32 id; + string name; + record { + string street; + string city; + int:Signed32 id; + } address; + } val4 = check parseString(str4); + test:assertEquals(val4.length(), 3); + test:assertEquals(val4.id, 2147483647); + test:assertEquals(val4.name, "Anne"); + test:assertEquals(val4.address.length(), 3); + test:assertEquals(val4.address.street, "Main"); + test:assertEquals(val4.address.city, "94"); + test:assertEquals(val4.address.id, -2147483648); +} + +@test:Config +isolated function testUnSignedInt8AsExpectedTypeForParseString() returns Error? { + int:Unsigned8 val1 = check parseString("255"); + test:assertEquals(val1, 255); + + int:Unsigned8 val2 = check parseString("0"); + test:assertEquals(val2, 0); + + int:Unsigned8[] val3 = check parseString("[0, 255]"); + test:assertEquals(val3, [0, 255]); + + string str4 = string `{ + "id": 0, + "name": "Anne", + "address": { + "street": "Main", + "city": "94", + "id": 255 + } + }`; + + record { + int:Unsigned8 id; + string name; + record { + string street; + string city; + int:Unsigned8 id; + } address; + } val4 = check parseString(str4); + test:assertEquals(val4.length(), 3); + test:assertEquals(val4.id, 0); + test:assertEquals(val4.name, "Anne"); + test:assertEquals(val4.address.length(), 3); + test:assertEquals(val4.address.street, "Main"); + test:assertEquals(val4.address.city, "94"); + test:assertEquals(val4.address.id, 255); +} + +@test:Config +isolated function testUnSignedInt16AsExpectedTypeForParseString() returns Error? { + int:Unsigned16 val1 = check parseString("65535"); + test:assertEquals(val1, 65535); + + int:Unsigned16 val2 = check parseString("0"); + test:assertEquals(val2, 0); + + int:Unsigned16[] val3 = check parseString("[0, 65535]"); + test:assertEquals(val3, [0, 65535]); + + string str4 = string `{ + "id": 0, + "name": "Anne", + "address": { + "street": "Main", + "city": "94", + "id": 65535 + } + }`; + + record { + int:Unsigned16 id; + string name; + record { + string street; + string city; + int:Unsigned16 id; + } address; + } val4 = check parseString(str4); + test:assertEquals(val4.length(), 3); + test:assertEquals(val4.id, 0); + test:assertEquals(val4.name, "Anne"); + test:assertEquals(val4.address.length(), 3); + test:assertEquals(val4.address.street, "Main"); + test:assertEquals(val4.address.city, "94"); + test:assertEquals(val4.address.id, 65535); +} + +@test:Config +isolated function testUnSignedInt32AsExpectedTypeForParseString() returns Error? 
{ + int:Unsigned32 val1 = check parseString("4294967295"); + test:assertEquals(val1, 4294967295); + + int:Unsigned32 val2 = check parseString("0"); + test:assertEquals(val2, 0); + + int:Unsigned32[] val3 = check parseString("[0, 4294967295]"); + test:assertEquals(val3, [0, 4294967295]); + + string str4 = string `{ + "id": 0, + "name": "Anne", + "address": { + "street": "Main", + "city": "94", + "id": 4294967295 + } + }`; + + record { + int:Unsigned32 id; + string name; + record { + string street; + string city; + int:Unsigned32 id; + } address; + } val4 = check parseString(str4); + test:assertEquals(val4.length(), 3); + test:assertEquals(val4.id, 0); + test:assertEquals(val4.name, "Anne"); + test:assertEquals(val4.address.length(), 3); + test:assertEquals(val4.address.street, "Main"); + test:assertEquals(val4.address.city, "94"); + test:assertEquals(val4.address.id, 4294967295); +} + +@test:Config +isolated function testUnalignedJsonContent() returns error? { + string jsonStr = string ` +{ + "a" + : + "h +ello", + "b": + 1 + }`; + record {| + string a; + int b; + |} val = check parseString(jsonStr); + test:assertEquals(val.a, "h\nello"); + test:assertEquals(val.b, 1); +} + +@test:Config +isolated function testParseStringNegative1() returns Error? { + string str = string `{ + "id": 12, + "name": "Anne", + "address": { + "street": "Main", + "city": "94", + "id": true + } + }`; + + RN|Error x = parseString(str); + test:assertTrue(x is Error); + test:assertEquals((x).message(), "incompatible value 'true' for type 'int' in field 'address.id'"); +} + +@test:Config +isolated function testParseStringNegative2() returns Error? { + string str = string `{ + "id": 12 + }`; + + RN2|Error x = parseString(str); + test:assertTrue(x is Error); + test:assertEquals((x).message(), "required field 'name' not present in JSON"); +} + +@test:Config +isolated function testParseStringNegative3() returns Error? { + string str = string `{ + "id": 12, + "name": "Anne", + "address": { + "street": "Main", + "city": "94" + } + }`; + + RN|Error x = parseString(str); + test:assertTrue(x is Error); + test:assertEquals((x).message(), "required field 'id' not present in JSON"); +} + +@test:Config +isolated function testParseStringNegative4() returns Error? { + string str = string `{ + name: "John" + }`; + + int|Error x = parseString(str); + test:assertTrue(x is Error); + test:assertEquals((x).message(), "invalid type 'int' expected 'map type'"); + + Union|Error y = parseString(str); + test:assertTrue(y is Error); + test:assertEquals((y).message(), "unsupported type 'ballerina/data.jsondata:0:Union'"); + + table|Error z = parseString(str); + test:assertTrue(z is Error); + test:assertEquals((z).message(), "unsupported type 'table'"); + + RN2|Error a = parseString("1"); + test:assertTrue(a is Error); + test:assertEquals((a).message(), "incompatible expected type 'data.jsondata:RN2' for value '1'"); +} + +@test:Config +isolated function testDuplicateFieldInRecordTypeWithParseString() returns Error? { + string str = string `{ + "title": "Clean Code", + "author": "Robert C. 
Martin", + `; + + BookN|Error x = parseString(str); + test:assertTrue(x is Error); + test:assertEquals((x).message(), "duplicate field 'author'"); +} + +@test:Config +isolated function testProjectionInArrayNegativeForParseString() { + string strVal1 = string `["1", 2, 3, { "a" : val_a }]`; + int[]|Error val1 = parseString(strVal1); + test:assertTrue(val1 is Error); + test:assertEquals((val1).message(), "invalid type 'int' expected 'map type'"); +} + +@test:Config +isolated function testUnionTypeAsExpTypeForParseStringNegative() { + string str1 = string `[ + 123, + "Lakshan", + { + "city": "Colombo", + "street": "123", + "zip": 123 + }, + { + "code": 123, + "subject": "Bio" + } + ]`; + (map|int|float)[]|Error err1 = parseString(str1); + test:assertTrue(err1 is Error); + test:assertEquals((err1).message(), "incompatible expected type '(map|int|float)' for value 'Lakshan'"); + + string str2 = string `[ + { + "city": "Colombo", + "street": "123", + "zip": 123 + }, + { + "code": 123, + "subject": "Bio" + } + ]`; + (map|int|float)[]|Error err2 = parseString(str2); + test:assertTrue(err2 is Error); + test:assertEquals((err2).message(), "unsupported type '(map|int|float)'"); + + string str3 = string `{ + "a": "hello", + "b": 1, + "c": { + "d": "world", + "e": 2 + } + }`; + (map|int|float)|Error err3 = parseString(str3); + test:assertTrue(err3 is Error); + test:assertEquals((err3).message(), "unsupported type '(map|int|float)'"); +} + +@test:Config { + dataProvider: dataProviderForSubTypeOfIntNegativeTestForParseString +} +isolated function testSubTypeOfIntAsExptypeNegative(string sourceData, typedesc expType, string expectedError) { + anydata|Error err = parseString(sourceData, {}, expType); + test:assertTrue(err is Error); + test:assertEquals((err).message(), expectedError); +} + +function dataProviderForSubTypeOfIntNegativeTestForParseString() returns [string, typedesc, string][] { + string incompatibleStr = "incompatible expected type "; + return [ + ["256", byte, incompatibleStr + "'byte' for value '256'"], + ["-1", byte, incompatibleStr + "'byte' for value '-1'"], + ["128", int:Signed8, incompatibleStr + "'lang.int:Signed8' for value '128'"], + ["-129", int:Signed8, incompatibleStr + "'lang.int:Signed8' for value '-129'"], + ["256", int:Unsigned8, incompatibleStr + "'lang.int:Unsigned8' for value '256'"], + ["-1", int:Unsigned8, incompatibleStr + "'lang.int:Unsigned8' for value '-1'"], + ["32768", int:Signed16, incompatibleStr + "'lang.int:Signed16' for value '32768'"], + ["-32769", int:Signed16, incompatibleStr + "'lang.int:Signed16' for value '-32769'"], + ["65536", int:Unsigned16, incompatibleStr + "'lang.int:Unsigned16' for value '65536'"], + ["-1", int:Unsigned16, incompatibleStr + "'lang.int:Unsigned16' for value '-1'"], + ["2147483648", int:Signed32, incompatibleStr + "'lang.int:Signed32' for value '2147483648'"], + ["-2147483649", int:Signed32, incompatibleStr + "'lang.int:Signed32' for value '-2147483649'"], + ["4294967296", int:Unsigned32, incompatibleStr + "'lang.int:Unsigned32' for value '4294967296'"], + ["-1", int:Unsigned32, incompatibleStr + "'lang.int:Unsigned32' for value '-1'"] + ]; +} + +@test:Config +isolated function testEmptyJsonDocumentNegative() { + string|Error err = parseString(""); + test:assertTrue(err is Error); + test:assertEquals((err).message(), "'empty JSON document' at line: '1' column: '1'"); +} + +@test:Config +isolated function testRecordWithRestAsExpectedTypeForParseStringNegative() { + string personStr = string ` + { + "id": 1, + "name": "Anne", + 
"measurements": { + "height": 5.5, + "weight": 60 + } + }`; + + PersonA|error val = parseString(personStr); + test:assertTrue(val is error); + test:assertEquals((val).message(), "incompatible expected type 'int' for value '5.5'"); +} + +@test:Config +function testComplexTypeAsUnionMemberAsExpTypeNegative() { + string str1 = string `[ + { + "p1":"v1", + "p2":1 + }, + { + "p1":"v2", + "p2":true + } + ]`; + T1|error t1 = parseString(str1); + test:assertTrue(t1 is error); + test:assertEquals((t1).message(), "unsupported type '(map|int|boolean)'"); + + string str2 = string ` + { + "p1":"v1", + "p2": { + "a": 1, + "b": 2 + } + }`; + T2|error t2 = parseString(str2); + test:assertTrue(t2 is error); + test:assertEquals((t2).message(), "unsupported type '(map|int)'"); +} diff --git a/ballerina/tests/from_json_test.bal b/ballerina/tests/from_json_test.bal new file mode 100644 index 0000000..e663db9 --- /dev/null +++ b/ballerina/tests/from_json_test.bal @@ -0,0 +1,1228 @@ +// Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). +// +// WSO2 LLC. licenses this file to you under the Apache License, +// Version 2.0 (the "License"); you may not use this file except +// in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import ballerina/test; + +@test:Config { + dataProvider: dataProviderForBasicTypeForParseAsType +} +isolated function testJsonToBasicTypes(json sourceData, typedesc expType, anydata expResult) returns Error? { + anydata result = check parseAsType(sourceData, {}, expType); + test:assertEquals(result, expResult); +} + +function dataProviderForBasicTypeForParseAsType() returns [json, typedesc, anydata][] { + return [ + [5, int, 5], + [5.5, float, 5.5], + [5.5, decimal, 5.5d], + ["hello", string, "hello"], + [true, boolean, true], + [1.5, decimal, 1.5d], + ["", string, ""], + [1.5, decimal, 1.5d], + [1.5, float, 1.5f], + [1.5, decimal, 1.5d], + [1.5, float, 1.5f], + [1.5, int, 2] + ]; +} + +@test:Config +isolated function testNilAsExpectedTypeWithParseAsType() returns error? { + () val1 = check parseAsType(null); + test:assertEquals(val1, ()); + + () val2 = check parseAsType(()); + test:assertEquals(val2, ()); +} + +@test:Config +isolated function testSimpleJsonToRecord() returns Error? { + json j = {"a": "hello", "b": 1}; + + SimpleRec1 recA = check parseAsType(j); + test:assertEquals(recA.a, "hello"); + test:assertEquals(recA.b, 1); + + SimpleRec2 recB = check parseAsType(j); + test:assertEquals(recB.a, "hello"); + test:assertEquals(recB.b, 1); + + OpenRecord recC = check parseAsType(j); + test:assertEquals(recC.get("a"), "hello"); + test:assertEquals(recC.get("b"), 1); +} + +@test:Config +isolated function testSimpleJsonToRecordWithProjection() returns Error? { + json j = {"a": "hello", "b": 1}; + + record {|string a;|} recA = check parseAsType(j); + test:assertEquals(recA.a, "hello"); + test:assertEquals(recA, {"a": "hello"}); +} + +@test:Config +isolated function testNestedJsonToRecord() returns Error? 
{ + json j = { + "a": "hello", + "b": 1, + "c": { + "d": "world", + "e": 2 + } + }; + + NestedRecord1 recA = check parseAsType(j); + test:assertEquals(recA.a, "hello"); + test:assertEquals(recA.b, 1); + test:assertEquals(recA.c.d, "world"); + test:assertEquals(recA.c.e, 2); + + NestedRecord2 recB = check parseAsType(j); + test:assertEquals(recB.a, "hello"); + test:assertEquals(recB.b, 1); + test:assertEquals(recB.c.d, "world"); + test:assertEquals(recB.c.e, 2); + + OpenRecord recC = check parseAsType(j); + test:assertEquals(recC.get("a"), "hello"); + test:assertEquals(recC.get("b"), 1); + test:assertEquals(recC.get("c"), {d: "world", e: 2}); +} + +@test:Config +isolated function testNestedJsonToRecordWithProjection() returns Error? { + json j = { + "a": "hello", + "b": 1, + "c": { + "d": "world", + "e": 2 + } + }; + + record {|string a; record {|string d;|} c;|} recA = check parseAsType(j); + test:assertEquals(recA.a, "hello"); + test:assertEquals(recA.c.d, "world"); + test:assertEquals(recA, {"a": "hello", "c": {"d": "world"}}); +} + +@test:Config +isolated function testJsonToRecordWithOptionalFields() returns Error? { + json j = {"a": "hello"}; + + record {|string a; int b?;|} recA = check parseAsType(j); + test:assertEquals(recA.a, "hello"); + test:assertEquals(recA.b, null); +} + +@test:Config +isolated function testJsonToRecordWithOptionalFieldsWithProjection() returns Error? { + json j = { + "a": "hello", + "b": 1, + "c": { + "d": "world", + "e": 2 + } + }; + + record {|string a; record {|string d; int f?;|} c;|} recA = check parseAsType(j); + test:assertEquals(recA.a, "hello"); + test:assertEquals(recA.c.d, "world"); + test:assertEquals(recA, {"a": "hello", "c": {"d": "world"}}); +} + +@test:Config +isolated function testParseAsType1() returns Error? { + json jsonContent = { + "id": 2, + "name": "Anne", + "address": { + "street": "Main", + "city": "94" + } + }; + + R x = check parseAsType(jsonContent); + test:assertEquals(x.id, 2); + test:assertEquals(x.name, "Anne"); + test:assertEquals(x.address.street, "Main"); + test:assertEquals(x.address.city, "94"); +} + +@test:Config +isolated function testMapTypeAsFieldTypeInRecord() returns Error? { + json jsonContent = { + "employees": { + "John": "Manager", + "Anne": "Developer" + } + }; + + Company x = check parseAsType(jsonContent); + test:assertEquals(x.employees["John"], "Manager"); + test:assertEquals(x.employees["Anne"], "Developer"); +} + +@test:Config +isolated function testParseAsType2() returns Error? { + json jsonContent = { + "name": "John", + "age": 30, + "address": { + "street": "123 Main St", + "zipcode": 10001, + "coordinates": { + "latitude": 40.7128, + "longitude": -74.0060 + } + } + }; + + Person x = check parseAsType(jsonContent); + test:assertEquals(x.name, "John"); + test:assertEquals(x.age, 30); + test:assertEquals(x.address.street, "123 Main St"); + test:assertEquals(x.address.zipcode, 10001); + test:assertEquals(x.address.coordinates.latitude, 40.7128); + test:assertEquals(x.address.coordinates.longitude, -74.0060); +} + +@test:Config +isolated function testParseAsType3() returns Error? { + json jsonContent = { + "title": "To Kill a Mockingbird", + "author": { + "name": "Harper Lee", + "birthdate": "1926-04-28", + "hometown": "Monroeville, Alabama", + "local": false + }, + "price": 10.5, + "publisher": { + "name": "J. B. 
Lippincott & Co.", + "year": 1960, + "location": "Philadelphia", + "month": "April" + } + }; + + Book x = check parseAsType(jsonContent); + test:assertEquals(x.title, "To Kill a Mockingbird"); + test:assertEquals(x.author.name, "Harper Lee"); + test:assertEquals(x.author.birthdate, "1926-04-28"); + test:assertEquals(x.author.hometown, "Monroeville, Alabama"); + test:assertEquals(x.publisher.name, "J. B. Lippincott & Co."); + test:assertEquals(x.publisher.year, 1960); + test:assertEquals(x.publisher["month"], "April"); + test:assertEquals(x.publisher["location"], "Philadelphia"); + test:assertEquals(x["price"], 10.5); + test:assertEquals(x.author["local"], false); +} + +@test:Config +isolated function testParseAsType4() returns Error? { + json jsonContent = { + "name": "School Twelve", + "city": 23, + "number": 12, + "section": 2, + "flag": true, + "tp": 12345 + }; + + School x = check parseAsType(jsonContent); + test:assertEquals(x.name, "School Twelve"); + test:assertEquals(x.number, 12); + test:assertEquals(x.flag, true); + test:assertEquals(x["section"], 2); + test:assertEquals(x["tp"], 12345); +} + +@test:Config +isolated function testParseAsType5() returns Error? { + json jsonContent = { + "intValue": 10, + "floatValue": 10.5, + "stringValue": "test", + "decimalValue": 10.50, + "doNotParse": "abc" + }; + + TestRecord x = check parseAsType(jsonContent); + test:assertEquals(x.intValue, 10); + test:assertEquals(x.floatValue, 10.5f); + test:assertEquals(x.stringValue, "test"); + test:assertEquals(x.decimalValue, 10.50d); + test:assertEquals(x["doNotParse"], "abc"); +} + +@test:Config +isolated function testParseAsType6() returns Error? { + json jsonContent = { + "id": 1, + "name": "Class A", + "student": { + "id": 2, + "name": "John Doe", + "school": { + "name": "ABC School", + "address": { + "street": "Main St", + "city": "New York" + } + } + }, + "teacher": { + "id": 3, + "name": "Jane Smith" + }, + "monitor": null + }; + + Class x = check parseAsType(jsonContent); + test:assertEquals(x.id, 1); + test:assertEquals(x.name, "Class A"); + test:assertEquals(x.student.id, 2); + test:assertEquals(x.student.name, "John Doe"); + test:assertEquals(x.student.school.name, "ABC School"); + test:assertEquals(x.student.school.address.street, "Main St"); + test:assertEquals(x.student.school.address.city, "New York"); + test:assertEquals(x.teacher.id, 3); + test:assertEquals(x.teacher.name, "Jane Smith"); + test:assertEquals(x.monitor, null); +} + +@test:Config +isolated function testParseAsType7() returns Error? { + json nestedJson = { + "intValue": 5, + "floatValue": 2.5, + "stringValue": "nested", + "decimalValue": 5.00 + }; + + json jsonContent = { + "intValue": 10, + "nested1": nestedJson + }; + + TestRecord2 x = check parseAsType(jsonContent); + test:assertEquals(x.intValue, 10); + test:assertEquals(x.nested1.intValue, 5); +} + +@test:Config +isolated function testParseAsType8() returns Error? { + json jsonContent = { + "street": "Main", + "city": "Mahar", + "house": 94 + }; + + TestR x = check parseAsType(jsonContent); + test:assertEquals(x.street, "Main"); + test:assertEquals(x.city, "Mahar"); +} + +@test:Config +isolated function testParseAsType9() returns Error? 
{ + json jsonContent = { + "street": "Main", + "city": "Mahar", + "houses": [94, 95, 96] + }; + + TestArr1 x = check parseAsType(jsonContent); + test:assertEquals(x.street, "Main"); + test:assertEquals(x.city, "Mahar"); + test:assertEquals(x.houses, [94, 95, 96]); +} + +@test:Config +isolated function testParseAsType10() returns Error? { + json jsonContent = { + "street": "Main", + "city": 11, + "house": [94, "Gedara"] + }; + + TestArr2 x = check parseAsType(jsonContent); + test:assertEquals(x.street, "Main"); + test:assertEquals(x.city, 11); + test:assertEquals(x.house, [94, "Gedara"]); +} + +@test:Config +isolated function testParseAsType11() returns Error? { + json jsonContent = { + "street": "Main", + "city": "Mahar", + "house": [94, [1, 2, 3]] + }; + + TestArr3 x = check parseAsType(jsonContent); + test:assertEquals(x.street, "Main"); + test:assertEquals(x.city, "Mahar"); + test:assertEquals(x.house, [94, [1, 2, 3]]); +} + +@test:Config +isolated function testParseAsType12() returns Error? { + json jsonContent = { + "street": "Main", + "city": { + "name": "Mahar", + "code": 94 + }, + "flag": true + }; + + TestJson x = check parseAsType(jsonContent); + test:assertEquals(x.street, "Main"); + test:assertEquals(x.city, {"name": "Mahar", "code": 94}); +} + +@test:Config +isolated function testParseAsType14() { + json jsonContent = { + "id": 12, + "name": "Anne", + "address": { + "id": 34, + "city": "94" + } + }; + + RN|Error x = parseAsType(jsonContent); + test:assertTrue(x is Error); + test:assertEquals((x).message(), "required field 'street' not present in JSON"); +} + +@test:Config +isolated function testParseAsType15() returns Error? { + json jsonContent = [1, 2, 3]; + + IntArr x = check parseAsType(jsonContent); + test:assertEquals(x, [1, 2, 3]); +} + +@test:Config +isolated function testParseAsType16() returns Error? { + json jsonContent = [1, "abc", [3, 4.0]]; + + Tuple|Error x = check parseAsType(jsonContent); + test:assertEquals(x, [1, "abc", [3, 4.0]]); +} + +@test:Config +isolated function testParseAsType17() returns Error? { + json jsonContent = { + "street": "Main", + "city": { + "name": "Mahar", + "code": 94, + "internal": { + "id": 12, + "agent": "Anne" + } + }, + "flag": true + }; + + TestJson x = check parseAsType(jsonContent); + test:assertEquals(x.street, "Main"); + test:assertEquals(x.city, {"name": "Mahar", "code": 94, "internal": {"id": 12, "agent": "Anne"}}); +} + +@test:Config +isolated function testParseAsType18() returns Error? { + json jsonContent = { + "books": [ + { + "title": "The Great Gatsby", + "author": "F. Scott Fitzgerald" + }, + { + "title": "The Grapes of Wrath", + "author": "John Steinbeck" + }, + { + "title": "Binary Echoes: Unraveling the Digital Web", + "author": "Alexandra Quinn" + } + ] + }; + + Library x = check parseAsType(jsonContent); + test:assertEquals(x.books.length(), 2); + test:assertEquals(x.books[0].title, "The Great Gatsby"); + test:assertEquals(x.books[0].author, "F. Scott Fitzgerald"); + test:assertEquals(x.books[1].title, "The Grapes of Wrath"); + test:assertEquals(x.books[1].author, "John Steinbeck"); +} + +@test:Config +isolated function testParseAsType19() returns Error? { + json jsonContent = { + "books": [ + { + "title": "The Great Gatsby", + "author": "F. 
Scott Fitzgerald" + }, + { + "title": "The Grapes of Wrath", + "author": "John Steinbeck" + }, + { + "title": "Binary Echoes: Unraveling the Digital Web", + "author": "Alexandra Quinn" + } + ] + }; + + LibraryB x = check parseAsType(jsonContent); + test:assertEquals(x.books.length(), 2); + test:assertEquals(x.books[0].title, "The Great Gatsby"); + test:assertEquals(x.books[0].author, "F. Scott Fitzgerald"); + test:assertEquals(x.books[1].title, "The Grapes of Wrath"); + test:assertEquals(x.books[1].author, "John Steinbeck"); + + LibraryC y = check parseAsType(jsonContent); + test:assertEquals(y.books.length(), 3); + test:assertEquals(y.books[0].title, "The Great Gatsby"); + test:assertEquals(y.books[0].author, "F. Scott Fitzgerald"); + test:assertEquals(y.books[1].title, "The Grapes of Wrath"); + test:assertEquals(y.books[1].author, "John Steinbeck"); + test:assertEquals(y.books[2].title, "Binary Echoes: Unraveling the Digital Web"); + test:assertEquals(y.books[2].author, "Alexandra Quinn"); +} + +@test:Config +isolated function testParseAsType20() returns Error? { + json jsonVal1 = { + "a": { + "c": "world", + "d": "2" + }, + "b": { + "c": "world", + "d": "2" + } + }; + + record {| + record {| + string c; + string d; + |}...; + |} val1 = check parseAsType(jsonVal1); + test:assertEquals(val1.length(), 2); + test:assertEquals(val1["a"]["c"], "world"); + test:assertEquals(val1["a"]["d"], "2"); + test:assertEquals(val1["b"]["c"], "world"); + test:assertEquals(val1["b"]["d"], "2"); + + record {| + map...; + |} val2 = check parseAsType(jsonVal1); + test:assertEquals(val2.length(), 2); + test:assertEquals(val2["a"]["c"], "world"); + test:assertEquals(val2["a"]["d"], "2"); + test:assertEquals(val2["b"]["c"], "world"); + test:assertEquals(val2["b"]["d"], "2"); + + json jsonVal3 = { + "a": [{ + "c": "world", + "d": "2" + }], + "b": [{ + "c": "war", + "d": "3" + }] + }; + + record {| + record {| + string c; + string d; + |}[]...; + |} val3 = check parseAsType(jsonVal3); + test:assertEquals(val3.length(), 2); + test:assertEquals(val3["a"], [{ + "c": "world", + "d": "2" + }]); + test:assertEquals(val3["b"], [{ + "c": "war", + "d": "3" + }]); +} + +@test:Config +isolated function testUnionTypeAsExpTypeForParseAsType() returns Error? { + decimal|float val1 = check parseAsType(1.0); + test:assertEquals(val1, 1.0d); + + json jsonVal2 = { + "a": "hello", + "b": 1.0 + }; + + record {| + decimal|float b; + |} val2 = check parseAsType(jsonVal2); + test:assertEquals(val2.length(), 1); + test:assertEquals(val2.b, 1.0d); + + json jsonVal3 = { + "a": { + "b": 1, + "d": { + "e": false + } + }, + "c": 2.0 + }; + + record {| + record {| int|decimal b; record {| string|boolean e; |} d; |} a; + decimal|float c; + |} val3 = check parseAsType(jsonVal3); + test:assertEquals(val3.length(), 2); + test:assertEquals(val3.a.length(), 2); + test:assertEquals(val3.a.b, 1); + test:assertEquals(val3.a.d.e, false); + test:assertEquals(val3.c, 2.0d); +} + +@test:Config +isolated function testAnydataAsExpTypeForParseAsType() returns Error? 
{ + anydata val1 = check parseAsType(1); + test:assertEquals(val1, 1); + + json jsonVal2 = { + "a": "hello", + "b": 1 + }; + + anydata val2 = check parseAsType(jsonVal2); + test:assertEquals(val2, {"a": "hello", "b": 1}); + + record {| + record {| + int b; + record {| + string e; + |} d; + |} a; + int c; + |} jsonVal3 = { + "a": { + "b": 1, + "d": { + "e": "hello" + } + }, + "c": 2 + }; + + anydata val3 = check parseAsType(jsonVal3); + test:assertEquals(val3, {"a": {"b": 1, "d": {"e": "hello"}}, "c": 2}); + + record {| + record {| + int b; + record {| + string e; + |} d; + |}[] a; + int c; + |} jsonVal4 = { + "a": [{ + "b": 1, + "d": { + "e": "hello" + } + }], + "c": 2 + }; + + anydata val4 = check parseAsType(jsonVal4); + test:assertEquals(val4, {"a": [{"b": 1, "d": {"e": "hello"}}], "c": 2}); + + [[int], int] str5 = [[1], 2]; + anydata val5 = check parseAsType(str5); + test:assertEquals(val5, [[1], 2]); +} + +@test:Config +isolated function testJsonAsExpTypeForParseAsType() returns Error? { + json val1 = check parseAsType(1); + test:assertEquals(val1, 1); + + record {| + string a; + int b; + |} jsonVal2 = { + "a": "hello", + "b": 1 + }; + + json val2 = check parseAsType(jsonVal2); + test:assertEquals(val2, {"a": "hello", "b": 1}); + + record {| + record {| + int b; + record {| + string e; + |} d; + |} a; + int c; + |} jsonVal3 = { + "a": { + "b": 1, + "d": { + "e": "hello" + } + }, + "c": 2 + }; + + json val3 = check parseAsType(jsonVal3); + test:assertEquals(val3, {"a": {"b": 1, "d": {"e": "hello"}}, "c": 2}); + + record {| + record {| + int b; + record {| + string e; + |} d; + |}[] a; + int c; + |} jsonVal4 = { + "a": [{ + "b": 1, + "d": { + "e": "hello" + } + }], + "c": 2 + }; + + json val4 = check parseAsType(jsonVal4); + test:assertEquals(val4, {"a": [{"b": 1, "d": {"e": "hello"}}], "c": 2}); + + [[int], float] jsonVal5 = [[1], 2]; + json val5 = check parseAsType(jsonVal5); + test:assertEquals(val5, [[1], 2.0]); +} + +@test:Config +isolated function testMapAsExpTypeForParseAsType() returns Error? { + record {| + string a; + string b; + |} jsonVal1 = { + "a": "hello", + "b": "1" + }; + + map val1 = check parseAsType(jsonVal1); + test:assertEquals(val1, {"a": "hello", "b": "1"}); + + json jsonVal2 = { + "a": "hello", + "b": 1, + "c": { + "d": "world", + "e": "2" + } + }; + record {| + string a; + int b; + map c; + |} val2 = check parseAsType(jsonVal2); + test:assertEquals(val2.a, "hello"); + test:assertEquals(val2.b, 1); + test:assertEquals(val2.c, {"d": "world", "e": "2"}); + + json jsonVal3 = { + "a": { + "c": "world", + "d": "2" + }, + "b": { + "c": "war", + "d": "3" + } + }; + + map> val3 = check parseAsType(jsonVal3); + test:assertEquals(val3, {"a": {"c": "world", "d": "2"}, "b": {"c": "war", "d": "3"}}); + + record {| + map a; + |} val4 = check parseAsType(jsonVal3); + test:assertEquals(val4.a, {"c": "world", "d": "2"}); + + map val5 = check parseAsType(jsonVal3); + test:assertEquals(val5, {"a": {"c": "world", "d": "2"}, "b": {"c": "war", "d": "3"}}); + + json jsonVal6 = { + a: "Kanth", + b: { + g: { + c: "hello", + d: "1" + }, + h: { + c: "world", + d: "2" + } + } + }; + record {| + string a; + map> b; + |} val6 = check parseAsType(jsonVal6); + test:assertEquals(val6.a, "Kanth"); + test:assertEquals(val6.b, {g: {c: "hello", d: "1"}, h: {c: "world", d: "2"}}); +} + +@test:Config +isolated function testProjectionInTupleForParseAsType() returns Error? 
{ + float[] jsonVal1 = [1, 2, 3, 4, 5, 8]; + [float, float] val1 = check parseAsType(jsonVal1); + test:assertEquals(val1, [1.0, 2.0]); + + record {| + float[] a; + |} jsonVal2 = { + "a": [1, 2, 3, 4, 5, 8] + }; + record {| [float, float] a; |} val2 = check parseAsType(jsonVal2); + test:assertEquals(val2.a, [1.0, 2.0]); + + [int, string] str3 = [1, "4"]; + [int] val3 = check parseAsType(str3); + test:assertEquals(val3, [1]); + + [string, record {|json...;|}] jsonVal4 = ["1", {}]; + [string] val4 = check parseAsType(jsonVal4); + test:assertEquals(val4, ["1"]); + + [string, int[], map] jsonVal5 = ["1", [], {"name": 1}]; + [string] val5 = check parseAsType(jsonVal5); + test:assertEquals(val5, ["1"]); +} + +@test:Config +isolated function testProjectionInArrayForParseAsType() returns Error? { + int[2] val1 = check parseAsType([1, 2, 3, 4, 5]); + test:assertEquals(val1, [1, 2]); + + record {| + int[] a; + |} jsonVal2 = { + "a": [1, 2, 3, 4, 5] + }; + record {| int[2] a; |} val2 = check parseAsType(jsonVal2); + test:assertEquals(val2, {a: [1, 2]}); + + json jsonVal3 = { + "a": [1, 2, 3, 4, 5], + "b": [1, 2, 3, 4, 5] + }; + record {| int[2] a; int[3] b; |} val3 = check parseAsType(jsonVal3); + test:assertEquals(val3, {a: [1, 2], b: [1, 2, 3]}); + + json jsonVal4 = { + "employees": [ + { "name": "Prakanth", + "age": 26 + }, + { "name": "Kevin", + "age": 25 + } + ] + }; + record {| record {| string name; int age; |}[1] employees; |} val4 = check parseAsType(jsonVal4); + test:assertEquals(val4, {employees: [{name: "Prakanth", age: 26}]}); + + [int, int, int, record {|int a;|}] jsonVal5 = [1, 2, 3, { a : 2 }]; + int[2] val5 = check parseAsType(jsonVal5); + test:assertEquals(val5, [1, 2]); +} + +@test:Config +isolated function testProjectionInRecordForParseAsType() returns Error? { + json jsonVal1 = {"name": "John", "age": 30, "city": "New York"}; + record {| string name; string city; |} val1 = check parseAsType(jsonVal1); + test:assertEquals(val1, {name: "John", city: "New York"}); + + json jsonVal2 = {"name": "John", "age": "30", "city": "New York"}; + record {| string name; string city; |} val2 = check parseAsType(jsonVal2); + test:assertEquals(val2, {name: "John", city: "New York"}); + + json jsonVal3 = { "name": "John", + "company": { + "name": "wso2", + "year": 2024, + "addrees": { + "street": "123", + "city": "Berkeley" + } + }, + "city": "New York" }; + record {| string name; string city; |} val3 = check parseAsType(jsonVal3); + test:assertEquals(val3, {name: "John", city: "New York"}); + + json jsonVal4 = { "name": "John", + "company": [{ + "name": "wso2", + "year": 2024, + "addrees": { + "street": "123", + "city": "Berkeley" + } + }], + "city": "New York" }; + record {| string name; string city; |} val4 = check parseAsType(jsonVal4); + test:assertEquals(val4, {name: "John", city: "New York"}); + + json jsonVal5 = { "name": "John", + "company1": [{ + "name": "wso2", + "year": 2024, + "addrees": { + "street": "123", + "city": "Berkeley" + } + }], + "city": "New York", + "company2": [{ + "name": "amzn", + "year": 2024, + "addrees": { + "street": "123", + "city": "Miami" + } + }] + }; + record {| string name; string city; |} val5 = check parseAsType(jsonVal5); + test:assertEquals(val5, {name: "John", city: "New York"}); +} + +@test:Config +isolated function testArrayOrTupleCaseForParseAsType() returns Error? 
{ + json jsonVal1 = [[1], 2.0]; + [[int], float] val1 = check parseAsType(jsonVal1); + test:assertEquals(val1, [[1], 2.0]); + + json jsonVal2 = [[1, 2], 2.0]; + [[int, int], float] val2 = check parseAsType(jsonVal2); + test:assertEquals(val2, [[1, 2], 2.0]); + + json jsonStr3 = [[1, 2], [2, 3]]; + int[][] val3 = check parseAsType(jsonStr3); + test:assertEquals(val3, [[1, 2], [2, 3]]); + + json jsonVal4 = {"val" : [[1, 2], "2.0", 3.0, [5, 6]]}; + record {| + [[int, int], string, float, [int, int]] val; + |} val4 = check parseAsType(jsonVal4); + test:assertEquals(val4, {val: [[1, 2], "2.0", 3.0, [5, 6]]}); + + json jsonVal41 = {"val1" : [[1, 2], "2.0", 3.0, [5, 6]], "val2" : [[1, 2], "2.0", 3.0, [5, 6]]}; + record {| + [[int, int], string, float, [int, int]] val1; + [[int, int], string, float, [int, int]] val2; + |} val41 = check parseAsType(jsonVal41); + test:assertEquals(val41, {val1: [[1, 2], "2.0", 3.0, [5, 6]], val2: [[1, 2], "2.0", 3.0, [5, 6]]}); + + json jsonVal5 = {"val" : [[1, 2], [2, 3]]}; + record {| + int[][] val; + |} val5 = check parseAsType(jsonVal5); + test:assertEquals(val5, {val: [[1, 2], [2, 3]]}); + + json jsonVal6 = [{"val" : [[1, 2], [2, 3]]}]; + [record {|int[][] val;|}] val6 = check parseAsType(jsonVal6); + test:assertEquals(val6, [{val: [[1, 2], [2, 3]]}]); +} + +@test:Config +isolated function testListFillerValuesWithParseAsType() returns Error? { + int[2] jsonVal1 = check parseAsType([1]); + test:assertEquals(jsonVal1, [1, 0]); + + [int, float, string, boolean] jsonVal2 = check parseAsType([1]); + test:assertEquals(jsonVal2, [1, 0.0, "", false]); + + record {| + float[3] A; + [int, decimal, float, boolean] B; + |} jsonVal3 = check parseAsType({A: [1], B: [1]}); + test:assertEquals(jsonVal3, {A: [1.0, 0.0, 0.0], B: [1, 0d, 0.0, false]}); +} + +@test:Config +isolated function testNameAnnotationWithParseAsType() returns Error? { + json jsonContent = { + "id": 1, + "title-name": "Harry Potter", + "author-name": "J.K. Rowling" + }; + + Book2 book = check parseAsType(jsonContent); + test:assertEquals(book.id, 1); + test:assertEquals(book.title, "Harry Potter"); + test:assertEquals(book.author, "J.K. Rowling"); +} + +@test:Config { + dataProvider: dataProviderForSubTypeIntPostiveCasesWithParseAsType +} +isolated function testSubTypeOfIntAsExpectedTypeWithParseAsType(json sourceData, typedesc expType, anydata expectedResult) returns Error? { + anydata val = check parseAsType(sourceData, {}, expType); + test:assertEquals(val, expectedResult); +} + +function dataProviderForSubTypeIntPostiveCasesWithParseAsType() returns [json, typedesc, anydata][] { + return [ + [255, byte, 255], + [255, int:Unsigned8, 255], + [0, byte, 0], + [0, int:Unsigned8, 0], + [127, int:Signed8, 127], + [-128, int:Signed8, -128], + [65535, int:Unsigned16, 65535], + [0, int:Unsigned16, 0], + [32767, int:Signed16, 32767], + [-32768, int:Signed16, -32768], + [4294967295, int:Unsigned32, 4294967295], + [0, int:Unsigned32, 0], + [2147483647, int:Signed32, 2147483647], + [-2147483648, int:Signed32, -2147483648], + [[255, 127, 32767, 2147483647, 255, 32767, 2147483647], [byte, int:Signed8, int:Signed16, int:Signed32, int:Unsigned8, int:Unsigned16, int:Unsigned32], [255, 127, 32767, 2147483647, 255, 32767, 2147483647]] + ]; +} + +@test:Config +isolated function testSubTypeOfIntAsFieldTypeForParseAsType() returns error? 
{ + json jsonVal4 = { + "a": 1, + "b": 127, + "c": 32767, + "d": 2147483647, + "e": 255, + "f": 32767, + "g": 2147483647 + }; + record {| + byte a; + int:Signed8 b; + int:Signed16 c; + int:Signed32 d; + int:Unsigned8 e; + int:Unsigned16 f; + int:Unsigned32 g; + |} val16 = check parseAsType(jsonVal4); + test:assertEquals(val16, {a: 1, b: 127, c: 32767, d: 2147483647, e: 255, f: 32767, g: 2147483647}); +} + +@test:Config +isolated function testSingletonAsExpectedTypeForParseAsType() returns Error? { + "1" val1 = check parseAsType("1"); + test:assertEquals(val1, "1"); + + Singleton1 val2 = check parseAsType(1); + test:assertEquals(val2, 1); + + SingletonUnion val3 = check parseAsType(2); + test:assertEquals(val3, 2); + + () val4 = check parseAsType(null); + test:assertEquals(val4, ()); + + json jsonContent = { + value: 1, + id: "3" + }; + SingletonInRecord val5 = check parseAsType(jsonContent); + test:assertEquals(val5.id, "3"); + test:assertEquals(val5.value, 1); +} + +@test:Config +isolated function testParseAsTypeNegative1() returns Error? { + json jsonContent = { + "id": 12, + "name": "Anne", + "address": { + "street": "Main", + "city": "94", + "id": true + } + }; + + RN|Error x = parseAsType(jsonContent); + test:assertTrue(x is Error); + test:assertEquals((x).message(), "incompatible value 'true' for type 'int' in field 'address.id'"); +} + +@test:Config +isolated function testParseAsTypeNegative2() returns Error? { + json jsonContent = { + "id": 12 + }; + + RN2|Error x = parseAsType(jsonContent); + test:assertTrue(x is Error); + test:assertEquals((x).message(), "required field 'name' not present in JSON"); +} + +@test:Config +isolated function testParseAsTypeNegative3() returns Error? { + json jsonContent = { + "id": 12, + "name": "Anne", + "address": { + "street": "Main", + "city": "94" + } + }; + + RN|Error x = parseAsType(jsonContent); + test:assertTrue(x is Error); + test:assertEquals((x).message(), "required field 'id' not present in JSON"); +} + +@test:Config +isolated function testParseAsTypeNegative4() returns Error? { + json jsonContent = { + name: "John" + }; + + int|Error x = parseAsType(jsonContent); + test:assertTrue(x is Error); + test:assertEquals((x).message(), "incompatible expected type 'int' for value '{\"name\":\"John\"}'"); + + Union|Error y = parseAsType(jsonContent); + test:assertTrue(y is Error); + test:assertEquals((y).message(), "invalid type 'data.jsondata:Union' expected 'anydata'"); + + table|Error z = parseAsType(jsonContent); + test:assertTrue(z is Error); + test:assertEquals((z).message(), "invalid type 'table' expected 'anydata'"); + + RN2|Error a = parseAsType("1"); + test:assertTrue(a is Error); + test:assertEquals((a).message(), "incompatible expected type 'data.jsondata:RN2' for value '1'"); + + string|Error b = parseAsType(1); + test:assertTrue(b is Error); + test:assertEquals((b).message(), "incompatible expected type 'string' for value '1'"); +} + +@test:Config +isolated function testParseAsTypeNegative6() { + json jsonContent = { + "street": "Main", + "city": "Mahar", + "house": [94, [1, 3, "4"]] + }; + + TestArr3|Error x = parseAsType(jsonContent); + test:assertTrue(x is Error); + test:assertEquals((x).message(), "incompatible value '4' for type 'int' in field 'house'"); +} + +@test:Config +isolated function testDuplicateFieldInRecordTypeWithParseAsType() returns Error? { + json jsonContent = string `{ + "title": "Clean Code", + "author": "Robert C. 
Martin", + `; + + BookN|Error x = parseAsType(jsonContent); + test:assertTrue(x is Error); + test:assertEquals((x).message(), "duplicate field 'author'"); +} + +@test:Config +isolated function testProjectionInArrayNegativeForParseAsType() { + [int, int, int, record {|int a;|}] jsonVal5 = [1, 2, 3, { a : 2 }]; + int[]|Error val5 = parseAsType(jsonVal5); + test:assertTrue(val5 is Error); + test:assertEquals((val5).message(), "incompatible expected type 'int' for value '{\"a\":2}'"); +} + +@test:Config { + dataProvider: dataProviderForSubTypeOfIntNegativeTestForParseAsType +} +isolated function testSubTypeOfIntAsExptypeWithParseAsTypeNegative(json sourceData, typedesc expType, string expectedError) { + anydata|Error result = parseAsType(sourceData, {}, expType); + test:assertTrue(result is Error); + test:assertEquals((result).message(), expectedError); +} + +function dataProviderForSubTypeOfIntNegativeTestForParseAsType() returns [json, typedesc, string][] { + string incompatibleStr = "incompatible expected type "; + return [ + [256, byte, incompatibleStr + "'byte' for value '256'"], + [-1, byte, incompatibleStr + "'byte' for value '-1'"], + [128, int:Signed8, incompatibleStr + "'lang.int:Signed8' for value '128'"], + [-129, int:Signed8, incompatibleStr + "'lang.int:Signed8' for value '-129'"], + [256, int:Unsigned8, incompatibleStr + "'lang.int:Unsigned8' for value '256'"], + [-1, int:Unsigned8, incompatibleStr + "'lang.int:Unsigned8' for value '-1'"], + [32768, int:Signed16, incompatibleStr + "'lang.int:Signed16' for value '32768'"], + [-32769, int:Signed16, incompatibleStr + "'lang.int:Signed16' for value '-32769'"], + [65536, int:Unsigned16, incompatibleStr + "'lang.int:Unsigned16' for value '65536'"], + [-1, int:Unsigned16, incompatibleStr + "'lang.int:Unsigned16' for value '-1'"], + [2147483648, int:Signed32, incompatibleStr + "'lang.int:Signed32' for value '2147483648'"], + [-2147483649, int:Signed32, incompatibleStr + "'lang.int:Signed32' for value '-2147483649'"], + [4294967296, int:Unsigned32, incompatibleStr + "'lang.int:Unsigned32' for value '4294967296'"], + [-1, int:Unsigned32, incompatibleStr + "'lang.int:Unsigned32' for value '-1'"] + ]; +} + +@test:Config +isolated function testRecordWithRestAsExpectedTypeForParseAsTypeNegative() { + json jsonVal = { + id: 1, + name: "Anne", + measurements: { + height: 5.5, + weight: 60, + shoeSize: "7" + } + }; + + PersonA|error val = parseAsType(jsonVal); + test:assertTrue(val is error); + test:assertEquals((val).message(), "incompatible value '7' for type 'int' in field 'measurements'"); +} diff --git a/ballerina/tests/from_json_with_options.bal b/ballerina/tests/from_json_with_options.bal new file mode 100644 index 0000000..fbb56ac --- /dev/null +++ b/ballerina/tests/from_json_with_options.bal @@ -0,0 +1,301 @@ +// Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). +// +// WSO2 LLC. licenses this file to you under the Apache License, +// Version 2.0 (the "License"); you may not use this file except +// in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import ballerina/test; + +const options = { + allowDataProjection: false +}; + +@test:Config +isolated function testDisableDataProjectionInArrayTypeForParseString() { + string jsonStr1 = string `[1, 2, 3, 4]`; + int[2]|error val1 = parseString(jsonStr1, options); + test:assertTrue(val1 is error); + test:assertEquals((val1).message(), "array size is not compatible with the expected size"); + + string strVal2 = string `{ + "a": [1, 2, 3, 4, 5] + }`; + record {|int[2] a;|}|error val2 = parseString(strVal2, options); + test:assertTrue(val2 is error); + test:assertEquals((val2).message(), "array size is not compatible with the expected size"); + + string strVal3 = string `{ + "a": [1, 2, 3, 4, 5], + "b": [1, 2, 3, 4, 5] + }`; + record {|int[2] a; int[3] b;|}|error val3 = parseString(strVal3, options); + test:assertTrue(val3 is error); + test:assertEquals((val3).message(), "array size is not compatible with the expected size"); + + string strVal4 = string `{ + "employees": [ + { "name": "Prakanth", + "age": 26 + }, + { "name": "Kevin", + "age": 25 + } + ] + }`; + record {|record {|string name; int age;|}[1] employees;|}|error val4 = parseString(strVal4, options); + test:assertTrue(val4 is error); + test:assertEquals((val4).message(), "array size is not compatible with the expected size"); + + string strVal5 = string `["1", 2, 3, { "a" : val_a }]`; + int[3]|error val5 = parseString(strVal5, options); + test:assertTrue(val5 is error); + test:assertEquals((val5).message(), "array size is not compatible with the expected size"); +} + +@test:Config +isolated function testDisableDataProjectionInTupleTypeForParseString() { + string str1 = string `[1, 2, 3, 4, 5, 8]`; + [string, float]|error val1 = parseString(str1, options); + test:assertTrue(val1 is error); + test:assertEquals((val1).message(), "array size is not compatible with the expected size"); + + string str2 = string `{ + "a": [1, 2, 3, 4, 5, 8] + }`; + record {|[string, float] a;|}|error val2 = parseString(str2, options); + test:assertTrue(val2 is error); + test:assertEquals((val2).message(), "array size is not compatible with the expected size"); + + string str3 = string `[1, "4"]`; + [float]|error val3 = parseString(str3, options); + test:assertTrue(val3 is error); + test:assertEquals((val3).message(), "array size is not compatible with the expected size"); + + string str4 = string `["1", {}]`; + [float]|error val4 = parseString(str4, options); + test:assertTrue(val4 is error); + test:assertEquals((val4).message(), "array size is not compatible with the expected size"); + + string str5 = string `["1", [], {"name": 1}]`; + [float]|error val5 = parseString(str5, options); + test:assertTrue(val5 is error); + test:assertEquals((val5).message(), "array size is not compatible with the expected size"); +} + +@test:Config +isolated function testDisableDataProjectionInRecordTypeWithParseString() { + string jsonStr1 = string `{"name": "John", "age": 30, "city": "New York"}`; + record {|string name; string city;|}|error val1 = parseString(jsonStr1, options); + test:assertTrue(val1 is error); + test:assertEquals((val1).message(), "undefined field 'age'"); + + string jsonStr2 = string `{"name": John, "age": "30", "city": "New York"}`; + record {|string name; string city;|}|error val2 = parseString(jsonStr2, options); + test:assertTrue(val2 is error); + test:assertEquals((val2).message(), "undefined field 'age'"); + + string jsonStr3 = string `{ "name": "John", + "company": { + "name": "wso2", + "year": 2024, + "addrees": { + "street": "123", + "city": 
"Berkeley" + } + }, + "city": "New York" }`; + record {|string name; string city;|}|error val3 = parseString(jsonStr3, options); + test:assertTrue(val3 is error); + test:assertEquals((val3).message(), "undefined field 'company'"); + + string jsonStr4 = string `{ "name": "John", + "company": [{ + "name": "wso2", + "year": 2024, + "addrees": { + "street": "123", + "city": "Berkeley" + } + }], + "city": "New York" }`; + record {|string name; string city;|}|error val4 = parseString(jsonStr4, options); + test:assertTrue(val4 is error); + test:assertEquals((val4).message(), "undefined field 'company'"); + + string jsonStr5 = string `{ "name": "John", + "company1": [{ + "name": "wso2", + "year": 2024, + "addrees": { + "street": "123", + "city": "Berkeley" + } + }], + "city": "New York", + "company2": [{ + "name": "amzn", + "year": 2024, + "addrees": { + "street": "123", + "city": "Miami" + } + }] + }`; + record {|string name; string city;|}|error val5 = parseString(jsonStr5, options); + test:assertTrue(val5 is error); + test:assertEquals((val5).message(), "undefined field 'company1'"); +} + +@test:Config +isolated function testDisableDataProjectionInArrayTypeForParseAsType() { + json jsonVal1 = [1, 2, 3, 4]; + int[2]|error val1 = parseAsType(jsonVal1, options); + test:assertTrue(val1 is error); + test:assertEquals((val1).message(), "array size is not compatible with the expected size"); + + json jsonVal2 = { + a: [1, 2, 3, 4, 5] + }; + record {|int[2] a;|}|error val2 = parseAsType(jsonVal2, options); + test:assertTrue(val2 is error); + test:assertEquals((val2).message(), "array size is not compatible with the expected size"); + + json jsonVal3 = { + a: [1, 2, 3, 4, 5], + b: [1, 2, 3, 4, 5] + }; + record {|int[2] a; int[3] b;|}|error val3 = parseAsType(jsonVal3, options); + test:assertTrue(val3 is error); + test:assertEquals((val3).message(), "array size is not compatible with the expected size"); + + json jsonVal4 = { + employees: [ + { + name: "Prakanth", + age: 26 + }, + { + name: "Kevin", + age: 25 + } + ] + }; + record {|record {|string name; int age;|}[1] employees;|}|error val4 = parseAsType(jsonVal4, options); + test:assertTrue(val4 is error); + test:assertEquals((val4).message(), "array size is not compatible with the expected size"); + + json jsonVal5 = ["1", 2, 3, {a: "val_a"}]; + int[3]|error val5 = parseAsType(jsonVal5, options); + test:assertTrue(val5 is error); + test:assertEquals((val5).message(), "array size is not compatible with the expected size"); +} + +@test:Config +isolated function testDisableDataProjectionInTupleTypeForParseAsType() { + json jsonVal1 = [1, 2, 3, 4, 5, 8]; + [int, int]|error val1 = parseAsType(jsonVal1, options); + test:assertTrue(val1 is error); + test:assertEquals((val1).message(), "array size is not compatible with the expected size"); + + json jsonVal2 = { + a: [1, 2, 3, 4, 5, 8] + }; + record {|[int, int] a;|}|error val2 = parseAsType(jsonVal2, options); + test:assertTrue(val2 is error); + test:assertEquals((val2).message(), "array size is not compatible with the expected size"); + + json jsonVal3 = [1, "4"]; + [int]|error val3 = parseAsType(jsonVal3, options); + test:assertTrue(val3 is error); + test:assertEquals((val3).message(), "array size is not compatible with the expected size"); + + json jsonVal4 = ["1", {}]; + [string]|error val4 = parseAsType(jsonVal4, options); + test:assertTrue(val4 is error); + test:assertEquals((val4).message(), "array size is not compatible with the expected size"); + + json jsonVal5 = ["1", [], {"name": 1}]; + 
[string]|error val5 = parseAsType(jsonVal5, options); + test:assertTrue(val5 is error); + test:assertEquals((val5).message(), "array size is not compatible with the expected size"); +} + +@test:Config +isolated function testDisableDataProjectionInRecordTypeWithParseAsType() { + json jsonVal1 = {"name": "John", "age": 30, "city": "New York"}; + record {|string name; string city;|}|error val1 = parseAsType(jsonVal1, options); + test:assertTrue(val1 is error); + test:assertEquals((val1).message(), "undefined field 'age'"); + + json jsonVal2 = { + "name": "John", + "company": { + "name": "wso2", + "year": 2024, + "addrees": { + "street": "123", + "city": "Berkeley" + } + }, + "city": "New York" + }; + record {|string name; string city;|}|error val2 = parseAsType(jsonVal2, options); + test:assertTrue(val2 is error); + test:assertEquals((val2).message(), "undefined field 'company'"); + + json jsonVal3 = { + "name": "John", + "company": [ + { + "name": "wso2", + "year": 2024, + "addrees": { + "street": "123", + "city": "Berkeley" + } + } + ], + "city": "New York" + }; + record {|string name; string city;|}|error val3 = parseAsType(jsonVal3, options); + test:assertTrue(val3 is error); + test:assertEquals((val3).message(), "undefined field 'company'"); + + json jsonVal4 = { + "name": "John", + "company1": [ + { + "name": "wso2", + "year": 2024, + "addrees": { + "street": "123", + "city": "Berkeley" + } + } + ], + "city": "New York", + "company2": [ + { + "name": "amzn", + "year": 2024, + "addrees": { + "street": "123", + "city": "Miami" + } + } + ] + }; + record {|string name; string city;|}|error val4 = parseAsType(jsonVal4, options); + test:assertTrue(val4 is error); + test:assertEquals((val4).message(), "undefined field 'company1'"); +} diff --git a/ballerina/tests/readonly_intersection_expected_type_test.bal b/ballerina/tests/readonly_intersection_expected_type_test.bal new file mode 100644 index 0000000..4ca2ede --- /dev/null +++ b/ballerina/tests/readonly_intersection_expected_type_test.bal @@ -0,0 +1,318 @@ +// Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). +// +// WSO2 LLC. licenses this file to you under the Apache License, +// Version 2.0 (the "License"); you may not use this file except +// in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import ballerina/test; + +type intArrayReadonly int[] & readonly; + +type intArray2dReadonly int[][] & readonly; + +type booleanArrayReadonly boolean[] & readonly; + +type type1Readonly [int, boolean, decimal, string] & readonly; + +type type2Readonly map & readonly; + +type type3Readonly map & readonly; + +type type4Readonly map> & readonly; + +type type5Readonly map[] & readonly; + +type mapIntArrayReadonly map & readonly; + +type jsonTypeReadonly json & readonly; + +type int2ArrayReadonly int[2] & readonly; + +type char2ArrayReadonly string:Char[2] & readonly; + +type char2DFixedArrayReadonly string:Char[3][4] & readonly; + +type int2DFixedArrayReadonly int[2][1] & readonly; + +type intTupleReadonly [[int], [int]] & readonly; + +type intTupleRestReadonly [[int], [int]...] 
& readonly; + +type intStringTupleReadonly [[int], [string]] & readonly; + +type intStringTupleRestReadonly [[int], [string]...] & readonly; + +type NilTypeReadonly () & readonly; + +type BooleanTypeReadonly boolean & readonly; + +type intTypeReadonly int & readonly; + +type floatTypeReadonly float & readonly; + +type decimalTypeReadonly decimal & readonly; + +type stringTypeReadonly string & readonly; + +type charTypeReadonly string:Char & readonly; + +type ByteTypeReadonly byte & readonly; + +type intUnsigned8Readonly int:Unsigned8 & readonly; + +type intSigned8Readonly int:Signed8 & readonly; + +type intUnsigned16Readonly int:Unsigned16 & readonly; + +type intSigned16Readonly int:Signed16 & readonly; + +type intUnsigned32Readonly int:Unsigned32 & readonly; + +type intSigned32Readonly int:Signed32 & readonly; + +type strinttupleReadonly [int, int] & readonly; + +type stringArrReadonly string[] & readonly; + +type tuple1Readonly [[int, string], [boolean, float]] & readonly; + +type tuple2Readonly [[float, string], [boolean, decimal]...] & readonly; + +type stringArrayTypeReadonly string[] & readonly; + +type Rec1ReadOnly Rec1 & readonly; + +type Rec2ReadOnly Rec2 & readonly; + +type Rec3ReadOnly Rec3 & readonly; + +type Rec1 record {| + string name; + int age; + boolean isMarried = true; + float...; +|}; + +type Rec2 record {| + Rec1 student; + string address; + int count; + float weight = 18.3; + boolean...; +|}; + +type Rec3 record {| + Rec1 student; +|}; + +type Rec4 record {| + readonly string department; + intTypeReadonly studentCount; + Rec1ReadOnly[] student; +|}; + +type Rec5 record {| + readonly & int id; + Rec2 & readonly health; +|}; + +type ExpectedTuple [ + intArrayReadonly, + type1Readonly, + intArrayReadonly, + intArray2dReadonly, + type3Readonly, + type4Readonly, + type5Readonly, + mapIntArrayReadonly, + int2ArrayReadonly, + int2DFixedArrayReadonly, + intTupleReadonly, + intTupleRestReadonly, + intTupleRestReadonly, + intStringTupleRestReadonly, + intStringTupleRestReadonly, + intTupleReadonly, + int2DFixedArrayReadonly, + BooleanTypeReadonly, + BooleanTypeReadonly, + intTypeReadonly, + floatTypeReadonly, + decimalTypeReadonly, + stringTypeReadonly, + charTypeReadonly, + ByteTypeReadonly, + intUnsigned8Readonly, + intSigned8Readonly, + intUnsigned16Readonly, + intSigned16Readonly, + intUnsigned32Readonly, + intSigned32Readonly, + NilTypeReadonly, + Rec1ReadOnly, + Rec3ReadOnly, + Rec2ReadOnly, + Rec4, + Rec5 +]; + +ExpectedTuple expectedResults = [ + [1, 2, 3], + [12, true, 123.4, "hello"], + [12, 13], + [[12], [13]], + {id: false, age: true}, + {key1: {id: 12, age: 24}, key2: {id: 12, age: 24}}, + [{id: 12, age: 24}, {id: 12, age: 24}], + {key1: [12, 13], key2: [132, 133]}, + [12], + [[1], [2]], + [[1], [2]], + [[1], [2], [3]], + [[1]], + [[1], ["2"], ["3"]], + [[1]], + [[1], [2]], + [[1], [2]], + true, + false, + 12, + 12.3, + 12.3, + "hello", + "h", + 12, + 13, + 14, + 15, + 16, + 17, + 18, + null, + {name: "John", age: 30, "height": 1.8}, + {student: {name: "John", age: 30, "height": 1.8}}, + {"isSingle": true, address: "this is address", count: 14, student: {name: "John", age: 30, "height": 1.8}}, + {department: "CSE", studentCount: 3, student: [{name: "John", age: 30, "height": 1.8}]}, + {id: 12, health: {student: {name: "John", age: 30, "height": 1.8}, address: "this is address", count: 14}} +]; + +@test:Config { + dataProvider: readonlyIntersectionTestDataForParseString +} +isolated function testReadOnlyIntersectionTypeAsExpTypForParseString(string sourceData, + 
typedesc expType, anydata expectedData) returns error? { + anydata result = check parseString(sourceData, {}, expType); + test:assertEquals(result, expectedData); +} + +function readonlyIntersectionTestDataForParseString() returns [string, typedesc, anydata][] { + return [ + [string `[1, 2, 3]`, intArrayReadonly, expectedResults[0]], + ["[12, true, 123.4, \"hello\"]", type1Readonly, expectedResults[1]], + ["[12, 13]", intArrayReadonly, expectedResults[2]], + ["[[12], [13]]", intArray2dReadonly, expectedResults[3]], + ["{\"id\": false, \"age\": true}", type3Readonly, expectedResults[4]], + ["{\"key1\": {\"id\": 12, \"age\": 24}, \"key2\": {\"id\": 12, \"age\": 24}}", type4Readonly, expectedResults[5]], + ["[{\"id\": 12, \"age\": 24}, {\"id\": 12, \"age\": 24}]", type5Readonly, expectedResults[6]], + ["{\"key1\": [12, 13], \"key2\": [132, 133]}", mapIntArrayReadonly, expectedResults[7]], + ["[12]", int2ArrayReadonly, expectedResults[8]], + ["[[1],[2]]", int2DFixedArrayReadonly, expectedResults[9]], + ["[[1],[2]]", intTupleReadonly, expectedResults[10]], + ["[[1],[2],[3]]", intTupleRestReadonly, expectedResults[11]], + ["[[1]]", intTupleRestReadonly, expectedResults[12]], + ["[[1],[\"2\"],[\"3\"]]", intStringTupleRestReadonly, expectedResults[13]], + ["[[1]]", intStringTupleRestReadonly, expectedResults[14]], + ["[[1],[2]]", intTupleReadonly, expectedResults[15]], + ["[[1],[2]]", int2DFixedArrayReadonly, expectedResults[16]], + ["true", BooleanTypeReadonly, expectedResults[17]], + ["false", BooleanTypeReadonly, expectedResults[18]], + ["12", intTypeReadonly, expectedResults[19]], + ["12.3", floatTypeReadonly, expectedResults[20]], + ["12.3", decimalTypeReadonly, expectedResults[21]], + ["\"hello\"", stringTypeReadonly, expectedResults[22]], + ["\"h\"", charTypeReadonly, expectedResults[23]], + ["12", ByteTypeReadonly, expectedResults[24]], + ["13", intUnsigned8Readonly, expectedResults[25]], + ["14", intSigned8Readonly, expectedResults[26]], + ["15", intUnsigned16Readonly, expectedResults[27]], + ["16", intSigned16Readonly, expectedResults[28]], + ["17", intUnsigned32Readonly, expectedResults[29]], + ["18", intSigned32Readonly, expectedResults[30]], + ["null", NilTypeReadonly, expectedResults[31]], + [string `{"name": "John", "age": 30, "height": 1.8}`, Rec1ReadOnly, expectedResults[32]], + [string `{"student": {"name": "John", "age": 30, "height": 1.8}}`, Rec3ReadOnly, expectedResults[33]], + [string `{"isSingle": true, "address": "this is address", "count": 14,"student": {"name": "John", "age": 30, "height": 1.8}}`, Rec2ReadOnly, expectedResults[34]], + [string `{"department": "CSE", "studentCount": 3, "student": [{"name": "John", "age": 30, "height": 1.8}]}`, Rec4, expectedResults[35]], + [string `{"id": 12, "health": {"student": {"name": "John", "age": 30, "height": 1.8}, "address": "this is address", "count": 14}}`, Rec5, expectedResults[36]] + ]; +} + +@test:Config { + dataProvider: readonlyIntersectionTestDataForParseAsType +} +isolated function testReadOnlyIntersectionTypeAsExpTypForParseAsType(json sourceData, + typedesc expType, anydata expectedData) returns error? 
{ + anydata result = check parseAsType(sourceData, {}, expType); + test:assertEquals(result, expectedData); +} + +function readonlyIntersectionTestDataForParseAsType() returns [json, typedesc, anydata][] { + return [ + [[1, 2, 3], intArrayReadonly, expectedResults[0]], + [[12, true, 123.4, "hello"], type1Readonly, expectedResults[1]], + [[12, 13], intArrayReadonly, expectedResults[2]], + [[[12], [13]], intArray2dReadonly, expectedResults[3]], + [{id: false, age: true}, type3Readonly, expectedResults[4]], + [{key1: {id: 12, age: 24}, key2: {id: 12, age: 24}}, type4Readonly, expectedResults[5]], + [[{id: 12, age: 24}, {id: 12, age: 24}], type5Readonly, expectedResults[6]], + [{key1: [12, 13], key2: [132, 133]}, mapIntArrayReadonly, expectedResults[7]], + [[12], int2ArrayReadonly, expectedResults[8]], + [[[1], [2]], int2DFixedArrayReadonly, expectedResults[9]], + [[[1], [2]], intTupleReadonly, expectedResults[10]], + [[[1], [2], [3]], intTupleRestReadonly, expectedResults[11]], + [[[1]], intTupleRestReadonly, expectedResults[12]], + [[[1], ["2"], ["3"]], intStringTupleRestReadonly, expectedResults[13]], + [[[1]], intStringTupleRestReadonly, expectedResults[14]], + [[[1], [2]], intTupleReadonly, expectedResults[15]], + [[[1], [2]], int2DFixedArrayReadonly, expectedResults[16]], + [true, BooleanTypeReadonly, expectedResults[17]], + [false, BooleanTypeReadonly, expectedResults[18]], + [12, intTypeReadonly, expectedResults[19]], + [12.3, floatTypeReadonly, expectedResults[20]], + [12.3, decimalTypeReadonly, expectedResults[21]], + ["hello", stringTypeReadonly, expectedResults[22]], + ["h", charTypeReadonly, expectedResults[23]], + [12, ByteTypeReadonly, expectedResults[24]], + [13, intUnsigned8Readonly, expectedResults[25]], + [14, intSigned8Readonly, expectedResults[26]], + [15, intUnsigned16Readonly, expectedResults[27]], + [16, intSigned16Readonly, expectedResults[28]], + [17, intUnsigned32Readonly, expectedResults[29]], + [18, intSigned32Readonly, expectedResults[30]], + [null, NilTypeReadonly, expectedResults[31]], + [{name: "John", "age": 30, "height": 1.8}, Rec1ReadOnly, expectedResults[32]], + [{"student": {"name": "John", "age": 30, "height": 1.8}}, Rec3ReadOnly, expectedResults[33]], + [ + { + "isSingle": true, + "address": "this is address", + "count": 14, + "student": {"name": "John", "age": 30, "height": 1.8} + }, + Rec2ReadOnly, + expectedResults[34] + ], + [{"department": "CSE", "studentCount": 3, "student": [{"name": "John", "age": 30, "height": 1.8}]}, Rec4, expectedResults[35]], + [{"id": 12, "health": {"student": {"name": "John", "age": 30, "height": 1.8}, "address": "this is address", "count": 14}}, Rec5, expectedResults[36]] + ]; +} diff --git a/ballerina/tests/stream_large_file_test.bal b/ballerina/tests/stream_large_file_test.bal new file mode 100644 index 0000000..f7314fb --- /dev/null +++ b/ballerina/tests/stream_large_file_test.bal @@ -0,0 +1,156 @@ +// Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). +// +// WSO2 LLC. licenses this file to you under the Apache License, +// Version 2.0 (the "License"); you may not use this file except +// in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +import ballerina/io; +import ballerina/test; + +const LARGE_JSON_FILE = "build//resources//large_data.json"; +const POSTIONS = { + "Associate Tech Lead": 2000, + "Software Engineer": 1500, + "Intern": 200, + "Senior Software Engineer": 1800, + "Tech Lead": 3000, + "Architect": 5000 +}; +const PRODUCTS = ["IAM", "Ballerina", "MI", "APIM", "CHOREO", "ASGADIO"]; + +type CompanyR1 record {| + EmployeeR1[] employees; + CustomerR1[] customers; +|}; + +type EmployeeR1 record {| + int id; + string product; + string position; + int salary; +|}; + +type CustomerR1 record {| + int id; + string name; + string product; +|}; + +@test:BeforeSuite +function createLargeFile() returns error? { + io:WritableByteChannel wbc = check io:openWritableFile(LARGE_JSON_FILE); + string begin = string `{`; + string end = "}\n"; + _ = check wbc.write(begin.toBytes(), 0); + + _ = check wbc.write(string `"employees": + [ + `.toBytes(), 0); + _ = check wbc.write(createEmployee(0).toString().toBytes(), 0); + foreach int i in 1 ... 1000 { + _ = check wbc.write(",\n ".toBytes(), 0); + _ = check wbc.write(createEmployee(i).toString().toBytes(), 0); + } + _ = check wbc.write("\n ],\n".toBytes(), 0); + + _ = check wbc.write(string `"customers": + [ + `.toBytes(), 0); + _ = check wbc.write(createCustomer(0).toString().toBytes(), 0); + foreach int i in 1...1000 { + _ = check wbc.write(",\n ".toBytes(), 0); + _ = check wbc.write(createCustomer(i).toString().toBytes(), 0); + } + _ = check wbc.write("\n ]\n".toBytes(), 0); + + + _ = check wbc.write(end.toBytes(), 0); + _ = check wbc.close(); +} + +@test:Config +function testLargeFileStream() returns error? { + stream dataStream = check io:fileReadBlocksAsStream(LARGE_JSON_FILE); + CompanyR1 company = check parseStream(dataStream); + test:assertEquals(company.employees.length(), 1001); + test:assertEquals(company.customers.length(), 1001); + + test:assertEquals(company.employees[0].id, 0); + test:assertEquals(company.employees[0].product, "IAM"); + test:assertEquals(company.employees[0].position, "Associate Tech Lead"); + test:assertEquals(company.employees[0].salary, 2000); + test:assertEquals(company.customers[0].id, 0); + test:assertEquals(company.customers[0].name, "Customer0"); + test:assertEquals(company.customers[0].product, "IAM"); + + test:assertEquals(company.employees[1000].id, 1000); + test:assertEquals(company.employees[1000].product, "CHOREO"); + test:assertEquals(company.employees[1000].position, "Tech Lead"); + test:assertEquals(company.employees[1000].salary, 3000); + test:assertEquals(company.customers[1000].id, 1000); + test:assertEquals(company.customers[1000].name, "Customer1000"); + test:assertEquals(company.customers[1000].product, "CHOREO"); +} + +type EmployeeR2 record {| + int id; + string position; +|}; + +type CustomerR2 record {| + int id; + string product; +|}; + +@test:Config +function testLargeFileStreamWithProjection() returns error? 
{ + stream dataStream = check io:fileReadBlocksAsStream(LARGE_JSON_FILE); + record {| + EmployeeR2[5] employees; + CustomerR2[9] customers; + |} company = check parseStream(dataStream); + test:assertEquals(company.employees.length(), 5); + test:assertEquals(company.customers.length(), 9); + + test:assertEquals(company.employees[0].length(), 2); + test:assertEquals(company.employees[0].id, 0); + test:assertEquals(company.employees[0].position, "Associate Tech Lead"); + test:assertEquals(company.customers[0].length(), 2); + test:assertEquals(company.customers[0].id, 0); + test:assertEquals(company.customers[0].product, "IAM"); + + test:assertEquals(company.employees[4].length(), 2); + test:assertEquals(company.employees[4].id, 4); + test:assertEquals(company.employees[4].position, "Tech Lead"); + test:assertEquals(company.customers[4].length(), 2); + test:assertEquals(company.customers[4].id, 4); + test:assertEquals(company.customers[4].product, "CHOREO"); +} + +function createEmployee(int id) returns EmployeeR1 { + string position = POSTIONS.keys()[id % POSTIONS.keys().length()]; + return { + "id": id, + "product": PRODUCTS[id % PRODUCTS.length()], + "position": position, + "salary": POSTIONS[position] ?: 0 + }; +} + +function createCustomer(int id) returns CustomerR1 { + return { + "id": id, + "name": "Customer" + id.toString(), + "product": PRODUCTS[id % PRODUCTS.length()] + }; +} diff --git a/ballerina/tests/to_json_test.bal b/ballerina/tests/to_json_test.bal new file mode 100644 index 0000000..92154ff --- /dev/null +++ b/ballerina/tests/to_json_test.bal @@ -0,0 +1,82 @@ +// Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). +// +// WSO2 LLC. licenses this file to you under the Apache License, +// Version 2.0 (the "License"); you may not use this file except +// in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
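+
+// The tests below cover `toJson` conversions for simple values, open and closed
+// records (including rest fields), and xml values. A minimal usage sketch
+// (illustrative only; the `Person` record here is hypothetical and not part of
+// this test file):
+//
+//     type Person record {| string name; int age; |};
+//     json j = check toJson(<Person>{name: "Jo", age: 1});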
+ +import ballerina/test; + +@test:Config +function testToJsonWithBasicType() { + string name = "Kanth"; + json|Error j = toJson(name); + test:assertTrue(j is json); + test:assertEquals(j, "Kanth"); + + int age = 26; + json|Error j2 = toJson(age); + test:assertTrue(j2 is json); + test:assertEquals(j2, 26); + + float height = 5.6; + json|Error j3 = toJson(height); + test:assertTrue(j3 is json); + test:assertEquals(j3, 5.6); + + boolean isStudent = false; + json|Error j4 = toJson(isStudent); + test:assertTrue(j4 is json); + test:assertEquals(j4, false); + + json|Error j5 = toJson(()); + test:assertTrue(j5 is json); + test:assertEquals(j5, ()); +} + +type Student record { + string name; + int age; +}; + +@test:Config +function testToJsonWithRecord1() { + Student s = {name: "Kanth", age: 26}; + json|Error j = toJson(s); + test:assertTrue(j is json); + test:assertEquals(j, {name: "Kanth", age: 26}); +} + +type Address2 record {| + string country; + string city; + json...; +|}; + +@test:Config +function testToJsonWithRecord2() { + Address2 addr1 = {country: "x", city: "y", "street": "z", "no": 3}; + json|Error jsonaddr1 = toJson(addr1); + test:assertTrue(jsonaddr1 is json); + test:assertEquals(jsonaddr1, {country: "x", city: "y", "street": "z", "no": 3}); +} + +@test:Config +function testToJsonWithXML() { + xml x1 = xml ` + Some + Writer + `; + json|Error j = toJson(x1); + test:assertTrue(j is json); + test:assertEquals(j, x1.toString()); +} diff --git a/ballerina/tests/types.bal b/ballerina/tests/types.bal new file mode 100644 index 0000000..22bae49 --- /dev/null +++ b/ballerina/tests/types.bal @@ -0,0 +1,278 @@ +// Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). +// +// WSO2 LLC. licenses this file to you under the Apache License, +// Version 2.0 (the "License"); you may not use this file except +// in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
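+
+// Shared type definitions used across the parse and projection tests: open and
+// closed records, rest fields, nested records, tuples, fixed-length arrays,
+// unions, singletons, and records that remap JSON keys through the `@Name`
+// annotation (see `Book2` and `BookN` below), plus the types used for the
+// negative cases at the end of this file.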
+ +type OpenRecord record {}; + +type SimpleRec1 record {| + string a; + int b; +|}; + +type SimpleRec2 record { + string a; + int b; +}; + +type NestedRecord1 record {| + string a; + int b; + record {| + string d; + int e; + |} c; +|}; + +type NestedRecord2 record { + string a; + int b; + record {| + string d; + int e; + |} c; +}; + +type RestRecord1 record {| + string a; + anydata...; +|}; + +type RestRecord2 record {| + string a; + int...; +|}; + +type RestRecord3 record {| + string a; + int b; + record {| + int...; + |} c; +|}; + +type RestRecord4 record {| + string a; + int b; + record {| + decimal|float...; + |}...; +|}; + +type Address record { + string street; + string city; +}; + +type R record {| + int id; + string name; + Address address; +|}; + +type Company record { + map employees; +}; + +type Coordinates record { + float latitude; + float longitude; +}; + +type AddressWithCord record { + string street; + int zipcode; + Coordinates coordinates; +}; + +type Person record { + string name; + int age; + AddressWithCord address; +}; + +type Author record {| + string name; + string birthdate; + string hometown; + boolean...; +|}; + +type Publisher record {| + string name; + int year; + string...; +|}; + +type Book record {| + string title; + Author author; + Publisher publisher; + float...; +|}; + +type Book2 record { + int id; + @Name { + value: "title-name" + } + string title; + @Name { + value: "author-name" + } + string author; +}; + +type School record {| + string name; + int number; + boolean flag; + int...; +|}; + +type TestRecord record { + int intValue; + float floatValue; + string stringValue; + decimal decimalValue; +}; + +type SchoolAddress record { + string street; + string city; +}; + +type School1 record { + string name; + SchoolAddress address; +}; + +type Student1 record { + int id; + string name; + School1 school; +}; + +type Teacher record { + int id; + string name; +}; + +type Class record { + int id; + string name; + Student1 student; + Teacher teacher; + Student1? 
monitor; +}; + +type TestRecord2 record { + int intValue; + TestRecord nested1; +}; + +type TestR record {| + string street; + string city; +|}; + +type TestArr1 record { + string street; + string city; + int[] houses; +}; + +type TestArr2 record { + string street; + int city; + [int, string] house; +}; + +type TestArr3 record { + string street; + string city; + [int, int[3]] house; +}; + +type TestJson record { + string street; + json city; + boolean flag; +}; + +type IntArr int[]; + +type Tuple [int, string, [int, float]]; + +type BookA record {| + string title; + string author; +|}; + +type Library record { + BookA[2] books; +}; + +type Singleton1 1; + +type SingletonUnion Singleton1|2|"3"; + +type SingletonInRecord record {| + Singleton1 value; + SingletonUnion id; +|}; + +type PersonA record {| + string name; + record {|int...;|} measurements; +|}; + +type T1 (map|int|boolean)[]; +type T2 record {| + string p1; + map|int p2; +|}; + +//////// Types used for Negative cases ///////// + +type AddressN record { + string street; + string city; + int id; +}; + +type RN record {| + int id; + string name; + AddressN address; +|}; + +type RN2 record {| + int id; + string name; +|}; + +type Union int|float; + +type INTARR int[3]; +type INTTUPLE [int, int, int, int...]; + +type BookN record { + string title; + @Name { + value: "author" + } + string name; + string author; +}; diff --git a/build-config/checkstyle/build.gradle b/build-config/checkstyle/build.gradle new file mode 100644 index 0000000..83901a7 --- /dev/null +++ b/build-config/checkstyle/build.gradle @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2024, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +plugins { + id "de.undercouch.download" +} + +apply plugin: 'java' + +task downloadCheckstyleRuleFiles(type: Download) { + src([ + 'https://raw.githubusercontent.com/wso2/code-quality-tools/v1.4/checkstyle/jdk-17/checkstyle.xml', + 'https://raw.githubusercontent.com/wso2/code-quality-tools/v1.4/checkstyle/jdk-17/suppressions.xml' + ]) + overwrite false + onlyIfNewer true + dest buildDir +} + +jar { + enabled = false +} + +clean { + enabled = false +} + +artifacts.add('default', file("$project.buildDir/checkstyle.xml")) { + builtBy('downloadCheckstyleRuleFiles') +} + +artifacts.add('default', file("$project.buildDir/suppressions.xml")) { + builtBy('downloadCheckstyleRuleFiles') +} diff --git a/build-config/resources/Ballerina.toml b/build-config/resources/Ballerina.toml new file mode 100644 index 0000000..4877db5 --- /dev/null +++ b/build-config/resources/Ballerina.toml @@ -0,0 +1,18 @@ +[package] +org = "ballerina" +name = "data.jsondata" +version = "@toml.version@" +authors = ["Ballerina"] +keywords = ["json"] +repository = "https://github.com/ballerina-platform/module-ballerina.jsondata" +license = ["Apache-2.0"] +distribution = "2201.8.4" + +[platform.java17] +graalvmCompatible = true + +[[platform.java17.dependency]] +groupId = "io.ballerina.lib" +artifactId = "jsondata-native" +version = "@toml.version@" +path = "../native/build/libs/data.jsondata-native-@project.version@.jar" diff --git a/build-config/resources/CompilerPlugin.toml b/build-config/resources/CompilerPlugin.toml new file mode 100644 index 0000000..e2678ce --- /dev/null +++ b/build-config/resources/CompilerPlugin.toml @@ -0,0 +1,6 @@ +[plugin] +id = "constraint-compiler-plugin" +class = "io.ballerina.lib.data.jsondata.compiler.JsondataCompilerPlugin" + +[[dependency]] +path = "../compiler-plugin/build/libs/data.jsondata-compiler-plugin-@project.version@.jar" diff --git a/build.gradle b/build.gradle new file mode 100644 index 0000000..2551eb1 --- /dev/null +++ b/build.gradle @@ -0,0 +1,92 @@ +/** + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +plugins { + id "com.github.spotbugs" version "${githubSpotbugsVersion}" + id "com.github.johnrengelman.shadow" version "${githubJohnrengelmanShadowVersion}" + id "de.undercouch.download" version "${underCouchDownloadVersion}" + id "net.researchgate.release" version "${researchgateReleaseVersion}" +} + +allprojects { + group = project.group + version = project.version + + apply plugin: 'jacoco' + apply plugin: 'maven-publish' + + repositories { + mavenLocal() + maven { + url = 'https://maven.wso2.org/nexus/content/repositories/releases/' + } + + maven { + url = 'https://maven.wso2.org/nexus/content/groups/wso2-public/' + } + + maven { + url = 'https://repo.maven.apache.org/maven2' + } + + maven { + url = 'https://maven.pkg.github.com/ballerina-platform/ballerina-lang' + credentials { + username System.getenv("packageUser") + password System.getenv("packagePAT") + } + } + } + + ext { + snapshotVersion= '-SNAPSHOT' + timestampedVersionRegex = '.*-\\d{8}-\\d{6}-\\w.*\$' + } +} + +subprojects { + + configurations { + ballerinaStdLibs + } + + dependencies { + /* Standard libraries */ + ballerinaStdLibs "io.ballerina.stdlib:io-ballerina:${stdlibIoVersion}" + } +} + +def moduleVersion = project.version.replace("-SNAPSHOT", "") + +release { + failOnPublishNeeded = false + + buildTasks = ["build"] + failOnSnapshotDependencies = true + versionPropertyFile = 'gradle.properties' + tagTemplate = 'v$version' + + git { + requireBranch = "release-${moduleVersion}" + pushToRemote = 'origin' + } +} + +task build { + dependsOn('data.jsondata-ballerina:build') +} diff --git a/compiler-plugin-test/build.gradle b/compiler-plugin-test/build.gradle new file mode 100644 index 0000000..efc4838 --- /dev/null +++ b/compiler-plugin-test/build.gradle @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (http://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +plugins { + id 'java' + id 'checkstyle' + id 'com.github.spotbugs' +} + +description = 'Ballerina - Jsondata Compiler Plugin Tests' + +dependencies { + checkstyle project(':checkstyle') + checkstyle "com.puppycrawl.tools:checkstyle:${puppycrawlCheckstyleVersion}" + + implementation project(':data.jsondata-compiler-plugin') + + testImplementation group: 'org.ballerinalang', name: 'ballerina-lang', version: "${ballerinaLangVersion}" + testImplementation group: 'org.ballerinalang', name: 'ballerina-tools-api', version: "${ballerinaLangVersion}" + testImplementation group: 'org.ballerinalang', name: 'ballerina-parser', version: "${ballerinaLangVersion}" + testImplementation group: 'org.testng', name: 'testng', version: "${testngVersion}" +} + +tasks.withType(Checkstyle) { + exclude '**/module-info.java' +} + +checkstyle { + toolVersion "${project.puppycrawlCheckstyleVersion}" + configFile rootProject.file("build-config/checkstyle/build/checkstyle.xml") + configProperties = ["suppressionFile" : file("${rootDir}/build-config/checkstyle/build/suppressions.xml")] +} + +checkstyleTest.dependsOn(":checkstyle:downloadCheckstyleRuleFiles") + +spotbugsTest { + effort "max" + reportLevel "low" + reportsDir = file("$project.buildDir/reports/spotbugs") + reports { + html.enabled true + text.enabled = true + } + def excludeFile = file("${project.projectDir}/spotbugs-exclude.xml") + if(excludeFile.exists()) { + excludeFilter = excludeFile + } +} + +spotbugsMain { + enabled false +} + +checkstyleMain { + enabled false +} + +compileJava { + doFirst { + options.compilerArgs = [ + '--module-path', classpath.asPath, + ] + classpath = files() + } +} + +test { + systemProperty "ballerina.offline.flag", "true" + useTestNG() + finalizedBy jacocoTestReport +} + +jacocoTestReport { + dependsOn test + reports { + xml.required = true + } + sourceSets project(':data.jsondata-compiler-plugin').sourceSets.main +} + +test.dependsOn ":data.jsondata-ballerina:build" diff --git a/compiler-plugin-test/src/test/java/io/ballerina/lib/data/jsondata/compiler/CompilerPluginTest.java b/compiler-plugin-test/src/test/java/io/ballerina/lib/data/jsondata/compiler/CompilerPluginTest.java new file mode 100644 index 0000000..bf49165 --- /dev/null +++ b/compiler-plugin-test/src/test/java/io/ballerina/lib/data/jsondata/compiler/CompilerPluginTest.java @@ -0,0 +1,123 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.ballerina.lib.data.jsondata.compiler; + +import io.ballerina.projects.DiagnosticResult; +import io.ballerina.tools.diagnostics.Diagnostic; +import io.ballerina.tools.diagnostics.DiagnosticSeverity; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.List; +import java.util.stream.Collectors; + +/** + * This class includes tests for Ballerina Jsondata compiler plugin. 
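+ * <p>
+ * Each test loads a sample Ballerina package from
+ * {@code src/test/resources/ballerina_sources} and asserts the diagnostics
+ * reported by the plugin (unsupported union types and duplicate record fields).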
+ */
+public class CompilerPluginTest {
+
+    static final String UNSUPPORTED_UNION_TYPE =
+            "unsupported union type: union type does not support multiple complex types";
+
+    @Test
+    public void testInvalidExpectedUnionType1() {
+        DiagnosticResult diagnosticResult =
+                CompilerPluginTestUtils.loadPackage("sample_package_1").getCompilation().diagnosticResult();
+        List<Diagnostic> errorDiagnosticsList = diagnosticResult.diagnostics().stream()
+                .filter(r -> r.diagnosticInfo().severity().equals(DiagnosticSeverity.ERROR))
+                .collect(Collectors.toList());
+        Assert.assertEquals(errorDiagnosticsList.size(), 1);
+        Assert.assertEquals(errorDiagnosticsList.get(0).diagnosticInfo().messageFormat(), UNSUPPORTED_UNION_TYPE);
+    }
+
+    @Test
+    public void testInvalidExpectedUnionType2() {
+        DiagnosticResult diagnosticResult =
+                CompilerPluginTestUtils.loadPackage("sample_package_2").getCompilation().diagnosticResult();
+        List<Diagnostic> errorDiagnosticsList = diagnosticResult.diagnostics().stream()
+                .filter(r -> r.diagnosticInfo().severity().equals(DiagnosticSeverity.ERROR))
+                .collect(Collectors.toList());
+        Assert.assertEquals(errorDiagnosticsList.size(), 1);
+        Assert.assertEquals(errorDiagnosticsList.get(0).diagnosticInfo().messageFormat(), UNSUPPORTED_UNION_TYPE);
+    }
+
+    @Test
+    public void testInvalidRecordFieldType1() {
+        DiagnosticResult diagnosticResult =
+                CompilerPluginTestUtils.loadPackage("sample_package_3").getCompilation().diagnosticResult();
+        List<Diagnostic> errorDiagnosticsList = diagnosticResult.diagnostics().stream()
+                .filter(r -> r.diagnosticInfo().severity().equals(DiagnosticSeverity.ERROR))
+                .collect(Collectors.toList());
+        Assert.assertEquals(errorDiagnosticsList.size(), 2);
+        Assert.assertEquals(errorDiagnosticsList.get(0).diagnosticInfo().messageFormat(), UNSUPPORTED_UNION_TYPE);
+        Assert.assertEquals(errorDiagnosticsList.get(1).diagnosticInfo().messageFormat(), UNSUPPORTED_UNION_TYPE);
+    }
+
+    @Test
+    public void testInvalidRecordFieldType2() {
+        DiagnosticResult diagnosticResult =
+                CompilerPluginTestUtils.loadPackage("sample_package_4").getCompilation().diagnosticResult();
+        List<Diagnostic> errorDiagnosticsList = diagnosticResult.diagnostics().stream()
+                .filter(r -> r.diagnosticInfo().severity().equals(DiagnosticSeverity.ERROR))
+                .collect(Collectors.toList());
+        Assert.assertEquals(errorDiagnosticsList.size(), 2);
+        Assert.assertEquals(errorDiagnosticsList.get(0).diagnosticInfo().messageFormat(), UNSUPPORTED_UNION_TYPE);
+        Assert.assertEquals(errorDiagnosticsList.get(1).diagnosticInfo().messageFormat(), UNSUPPORTED_UNION_TYPE);
+    }
+
+    @Test
+    public void testDuplicateField1() {
+        DiagnosticResult diagnosticResult =
+                CompilerPluginTestUtils.loadPackage("sample_package_5").getCompilation().diagnosticResult();
+        List<Diagnostic> errorDiagnosticsList = diagnosticResult.diagnostics().stream()
+                .filter(r -> r.diagnosticInfo().severity().equals(DiagnosticSeverity.ERROR))
+                .collect(Collectors.toList());
+        Assert.assertEquals(errorDiagnosticsList.size(), 1);
+        Assert.assertEquals(errorDiagnosticsList.get(0).diagnosticInfo().messageFormat(),
+                "invalid field: duplicate field found");
+    }
+
+    @Test
+    public void testDuplicateField2() {
+        DiagnosticResult diagnosticResult =
+                CompilerPluginTestUtils.loadPackage("sample_package_6").getCompilation().diagnosticResult();
+        List<Diagnostic> errorDiagnosticsList = diagnosticResult.diagnostics().stream()
+                .filter(r -> r.diagnosticInfo().severity().equals(DiagnosticSeverity.ERROR))
+                .collect(Collectors.toList());
+        Assert.assertEquals(errorDiagnosticsList.size(), 2);
+        
Assert.assertEquals(errorDiagnosticsList.get(0).diagnosticInfo().messageFormat(), + "invalid field: duplicate field found"); + Assert.assertEquals(errorDiagnosticsList.get(1).diagnosticInfo().messageFormat(), + "invalid field: duplicate field found"); + } + + @Test + public void testComplexUnionTypeAsExpectedType() { + DiagnosticResult diagnosticResult = + CompilerPluginTestUtils.loadPackage("sample_package_7").getCompilation().diagnosticResult(); + List errorDiagnosticsList = diagnosticResult.diagnostics().stream() + .filter(r -> r.diagnosticInfo().severity().equals(DiagnosticSeverity.ERROR)) + .collect(Collectors.toList()); + Assert.assertEquals(errorDiagnosticsList.size(), 2); + Assert.assertEquals(errorDiagnosticsList.get(0).diagnosticInfo().messageFormat(), + "unsupported union type: union type does not support multiple complex types"); + Assert.assertEquals(errorDiagnosticsList.get(1).diagnosticInfo().messageFormat(), + "unsupported union type: union type does not support multiple complex types"); + } +} diff --git a/compiler-plugin-test/src/test/java/io/ballerina/lib/data/jsondata/compiler/CompilerPluginTestUtils.java b/compiler-plugin-test/src/test/java/io/ballerina/lib/data/jsondata/compiler/CompilerPluginTestUtils.java new file mode 100644 index 0000000..e2609b7 --- /dev/null +++ b/compiler-plugin-test/src/test/java/io/ballerina/lib/data/jsondata/compiler/CompilerPluginTestUtils.java @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.ballerina.lib.data.jsondata.compiler; + +import io.ballerina.projects.Package; +import io.ballerina.projects.ProjectEnvironmentBuilder; +import io.ballerina.projects.directory.BuildProject; +import io.ballerina.projects.environment.Environment; +import io.ballerina.projects.environment.EnvironmentBuilder; + +import java.nio.file.Path; +import java.nio.file.Paths; + +/** + * Utility functions related to compiler plugins tests. 
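+ * <p>
+ * Test packages are loaded as {@link BuildProject} instances against the local
+ * {@code ballerina-runtime} distribution resolved from {@code ../target}.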
+ */ +public class CompilerPluginTestUtils { + private static final Path RESOURCE_DIRECTORY = Paths.get("src", "test", "resources", "ballerina_sources") + .toAbsolutePath(); + private static final Path DISTRIBUTION_PATH = Paths.get("../", "target", "ballerina-runtime") + .toAbsolutePath(); + + static Package loadPackage(String path) { + Path projectDirPath = RESOURCE_DIRECTORY.resolve(path); + Environment environment = EnvironmentBuilder.getBuilder().setBallerinaHome(DISTRIBUTION_PATH).build(); + ProjectEnvironmentBuilder projectEnvironmentBuilder = ProjectEnvironmentBuilder.getBuilder(environment); + BuildProject project = BuildProject.load(projectEnvironmentBuilder, projectDirPath); + return project.currentPackage(); + } +} diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_1/Ballerina.toml b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_1/Ballerina.toml new file mode 100644 index 0000000..f38a465 --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_1/Ballerina.toml @@ -0,0 +1,4 @@ +[package] +org = "jsondata_test" +name = "sample_1" +version = "0.1.0" diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_1/sample.bal b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_1/sample.bal new file mode 100644 index 0000000..1b525ab --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_1/sample.bal @@ -0,0 +1,5 @@ +import ballerina/data.jsondata; + +public function main() returns error? { + int|record {| int a;|}|record {| int b;|} val = check jsondata:parseString("1"); +} diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_2/Ballerina.toml b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_2/Ballerina.toml new file mode 100644 index 0000000..a662ba2 --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_2/Ballerina.toml @@ -0,0 +1,4 @@ +[package] +org = "jsondata_test" +name = "sample_2" +version = "0.1.0" diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_2/sample.bal b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_2/sample.bal new file mode 100644 index 0000000..e4ea557 --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_2/sample.bal @@ -0,0 +1,7 @@ +import ballerina/data.jsondata; + +type Union int|record {| int a;|}|record {| int b;|}; + +public function main() returns error? { + Union val = check jsondata:parseString("1"); +} diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_3/Ballerina.toml b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_3/Ballerina.toml new file mode 100644 index 0000000..2f1fe15 --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_3/Ballerina.toml @@ -0,0 +1,4 @@ +[package] +org = "jsondata_test" +name = "sample_3" +version = "0.1.0" diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_3/sample.bal b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_3/sample.bal new file mode 100644 index 0000000..c080f6b --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_3/sample.bal @@ -0,0 +1,23 @@ +import ballerina/data.jsondata; + +type Person record {| + string? 
name; + record {|string street; string country;|}|map address; + record {|string street; string country;|}|json company; +|}; + + +public function main() returns error? { + string str = string `{ + "name": "John", + "address": { + "street": "Main Street", + "country": "USA" + }, + "company": { + "street": "Main Street", + "country": "USA" + } + }`; + Person _ = check jsondata:parseString(str); +} diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_4/Ballerina.toml b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_4/Ballerina.toml new file mode 100644 index 0000000..18f1274 --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_4/Ballerina.toml @@ -0,0 +1,4 @@ +[package] +org = "jsondata_test" +name = "sample_4" +version = "0.1.0" diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_4/sample.bal b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_4/sample.bal new file mode 100644 index 0000000..304e361 --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_4/sample.bal @@ -0,0 +1,20 @@ +import ballerina/data.jsondata; + +type Person record {| + string? name; + record {|string street; string country;|}|map address; + record {|string street; string country;|}|json company; +|}; + +string str = string `{ + "name": "John", + "address": { + "street": "Main Street", + "country": "USA" + }, + "company": { + "street": "Main Street", + "country": "USA" + } + }`; +Person _ = check jsondata:parseString(str); diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_5/Ballerina.toml b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_5/Ballerina.toml new file mode 100644 index 0000000..7f7a2f8 --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_5/Ballerina.toml @@ -0,0 +1,4 @@ +[package] +org = "jsondata_test" +name = "sample_5" +version = "0.1.0" diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_5/sample.bal b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_5/sample.bal new file mode 100644 index 0000000..2e1eb62 --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_5/sample.bal @@ -0,0 +1,9 @@ +import ballerina/data.jsondata; + +type Data record { + @jsondata:Name { + value: "B" + } + string A; + string B; +}; diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_6/Ballerina.toml b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_6/Ballerina.toml new file mode 100644 index 0000000..9ab9d0d --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_6/Ballerina.toml @@ -0,0 +1,4 @@ +[package] +org = "jsondata_test" +name = "sample_6" +version = "0.1.0" diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_6/sample.bal b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_6/sample.bal new file mode 100644 index 0000000..933b17c --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_6/sample.bal @@ -0,0 +1,22 @@ +import ballerina/data.jsondata; + +public function main() returns error? 
{ + record { + @jsondata:Name { + value: "B" + } + string A; + string B; + } _ = check jsondata:parseAsType({ + "A": "Hello", + "B": "World" + }); + + record { + @jsondata:Name { + value: "B" + } + string A; + string B; + } _ = {A: "Hello", B: "World"}; +} diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_7/Ballerina.toml b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_7/Ballerina.toml new file mode 100644 index 0000000..203de45 --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_7/Ballerina.toml @@ -0,0 +1,4 @@ +[package] +org = "jsondata_test" +name = "sample_7" +version = "0.1.0" diff --git a/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_7/sample.bal b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_7/sample.bal new file mode 100644 index 0000000..853e06a --- /dev/null +++ b/compiler-plugin-test/src/test/resources/ballerina_sources/sample_package_7/sample.bal @@ -0,0 +1,31 @@ +import ballerina/data.jsondata; + +type T1 (map|int|boolean)[]; +type T2 record {| + string p1; + map|int p2; +|}; + +public function main() returns error? { + string str1 = string `[ + { + "p1":"v1", + "p2":1 + }, + { + "p1":"v2", + "p2":true + } + ]`; + T1 _ = check jsondata:parseString(str1); + + string str2 = string ` + { + "p1":"v1", + "p2": { + "a": 1, + "b": 2 + } + }`; + T2 _ = check jsondata:parseString(str2); +} diff --git a/compiler-plugin-test/src/test/resources/testng.xml b/compiler-plugin-test/src/test/resources/testng.xml new file mode 100644 index 0000000..402d393 --- /dev/null +++ b/compiler-plugin-test/src/test/resources/testng.xml @@ -0,0 +1,27 @@ + + + + + + + + + + + diff --git a/compiler-plugin/build.gradle b/compiler-plugin/build.gradle new file mode 100644 index 0000000..0aed114 --- /dev/null +++ b/compiler-plugin/build.gradle @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (http://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +plugins { + id 'java' + id 'checkstyle' + id 'com.github.spotbugs' +} + +description = 'Ballerina - JsonData Compiler Plugin' + +dependencies { + checkstyle project(':checkstyle') + checkstyle "com.puppycrawl.tools:checkstyle:${puppycrawlCheckstyleVersion}" + + implementation group: 'org.ballerinalang', name: 'ballerina-lang', version: "${ballerinaLangVersion}" + implementation group: 'org.ballerinalang', name: 'ballerina-tools-api', version: "${ballerinaLangVersion}" + implementation group: 'org.ballerinalang', name: 'ballerina-parser', version: "${ballerinaLangVersion}" +} + +def excludePattern = '**/module-info.java' +tasks.withType(Checkstyle) { + exclude excludePattern +} + +checkstyle { + toolVersion "${project.puppycrawlCheckstyleVersion}" + configFile rootProject.file("build-config/checkstyle/build/checkstyle.xml") + configProperties = ["suppressionFile" : file("${rootDir}/build-config/checkstyle/build/suppressions.xml")] +} + +checkstyleMain.dependsOn(":checkstyle:downloadCheckstyleRuleFiles") + +spotbugsMain { + effort "max" + reportLevel "low" + reportsDir = file("$project.buildDir/reports/spotbugs") + reports { + html.enabled true + text.enabled = true + } + def excludeFile = file("${rootDir}/spotbugs-exclude.xml") + if(excludeFile.exists()) { + excludeFilter = excludeFile + } +} + +spotbugsMain { + enabled false +} + +compileJava { + doFirst { + options.compilerArgs = [ + '--module-path', classpath.asPath, + ] + classpath = files() + } +} diff --git a/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/Constants.java b/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/Constants.java new file mode 100644 index 0000000..9478613 --- /dev/null +++ b/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/Constants.java @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.ballerina.lib.data.jsondata.compiler; + +/** + * Constants for Jsondata's compiler plugin. + * + * @since 0.1.0 + */ +public class Constants { + static final String PARSE_STRING = "parseString"; + static final String PARSE_BYTES = "parseBytes"; + static final String PARSE_STREAM = "parseStream"; + static final String NAME = "Name"; + static final String JSONDATA = "jsondata"; +} diff --git a/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/JsondataCodeAnalyzer.java b/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/JsondataCodeAnalyzer.java new file mode 100644 index 0000000..058f8ee --- /dev/null +++ b/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/JsondataCodeAnalyzer.java @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.ballerina.lib.data.jsondata.compiler; + +import io.ballerina.compiler.syntax.tree.SyntaxKind; +import io.ballerina.projects.plugins.CodeAnalysisContext; +import io.ballerina.projects.plugins.CodeAnalyzer; + +import java.util.List; + +/** + * Jsondata Code Analyzer. + * + * @since 0.1.0 + */ +public class JsondataCodeAnalyzer extends CodeAnalyzer { + @Override + public void init(CodeAnalysisContext codeAnalysisContext) { + codeAnalysisContext.addSyntaxNodeAnalysisTask(new JsondataTypeValidator(), + List.of(SyntaxKind.MODULE_PART)); + } +} diff --git a/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/JsondataCompilerPlugin.java b/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/JsondataCompilerPlugin.java new file mode 100644 index 0000000..870fffc --- /dev/null +++ b/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/JsondataCompilerPlugin.java @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.ballerina.lib.data.jsondata.compiler; + +import io.ballerina.projects.plugins.CompilerPlugin; +import io.ballerina.projects.plugins.CompilerPluginContext; + +/** + * Compiler plugin for Jsondata's utils functions. + * + * @since 0.1.0 + */ +public class JsondataCompilerPlugin extends CompilerPlugin { + + @Override + public void init(CompilerPluginContext compilerPluginContext) { + compilerPluginContext.addCodeAnalyzer(new JsondataCodeAnalyzer()); + } +} diff --git a/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/JsondataDiagnosticCodes.java b/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/JsondataDiagnosticCodes.java new file mode 100644 index 0000000..017084f --- /dev/null +++ b/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/JsondataDiagnosticCodes.java @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.ballerina.lib.data.jsondata.compiler; + +import io.ballerina.tools.diagnostics.DiagnosticSeverity; + +import static io.ballerina.tools.diagnostics.DiagnosticSeverity.ERROR; + +/** + * Diagnostic codes for Jsondata's compiler plugin. + * + * @since 0.1.0 + */ +public enum JsondataDiagnosticCodes { + + UNSUPPORTED_UNION_TYPE("JSON_ERROR_201", + "unsupported union type: union type does not support multiple complex types", ERROR), + DUPLICATE_FIELD("JSON_ERROR_202", "invalid field: duplicate field found", ERROR); + + private final String code; + private final String message; + private final DiagnosticSeverity severity; + + JsondataDiagnosticCodes(String code, String message, DiagnosticSeverity severity) { + this.code = code; + this.message = message; + this.severity = severity; + } + + public String getCode() { + return code; + } + + public String getMessage() { + return message; + } + + public DiagnosticSeverity getSeverity() { + return severity; + } +} diff --git a/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/JsondataTypeValidator.java b/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/JsondataTypeValidator.java new file mode 100644 index 0000000..d6285b2 --- /dev/null +++ b/compiler-plugin/src/main/java/io/ballerina/lib/data/jsondata/compiler/JsondataTypeValidator.java @@ -0,0 +1,292 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.ballerina.lib.data.jsondata.compiler; + +import io.ballerina.compiler.api.SemanticModel; +import io.ballerina.compiler.api.symbols.AnnotationAttachmentSymbol; +import io.ballerina.compiler.api.symbols.AnnotationSymbol; +import io.ballerina.compiler.api.symbols.ArrayTypeSymbol; +import io.ballerina.compiler.api.symbols.ModuleSymbol; +import io.ballerina.compiler.api.symbols.RecordFieldSymbol; +import io.ballerina.compiler.api.symbols.RecordTypeSymbol; +import io.ballerina.compiler.api.symbols.Symbol; +import io.ballerina.compiler.api.symbols.TupleTypeSymbol; +import io.ballerina.compiler.api.symbols.TypeDefinitionSymbol; +import io.ballerina.compiler.api.symbols.TypeDescKind; +import io.ballerina.compiler.api.symbols.TypeReferenceTypeSymbol; +import io.ballerina.compiler.api.symbols.TypeSymbol; +import io.ballerina.compiler.api.symbols.UnionTypeSymbol; +import io.ballerina.compiler.api.symbols.VariableSymbol; +import io.ballerina.compiler.syntax.tree.CheckExpressionNode; +import io.ballerina.compiler.syntax.tree.ChildNodeList; +import io.ballerina.compiler.syntax.tree.ExpressionNode; +import io.ballerina.compiler.syntax.tree.FunctionCallExpressionNode; +import io.ballerina.compiler.syntax.tree.FunctionDefinitionNode; +import io.ballerina.compiler.syntax.tree.ModuleMemberDeclarationNode; +import io.ballerina.compiler.syntax.tree.ModulePartNode; +import io.ballerina.compiler.syntax.tree.ModuleVariableDeclarationNode; +import io.ballerina.compiler.syntax.tree.Node; +import io.ballerina.compiler.syntax.tree.SyntaxKind; +import io.ballerina.compiler.syntax.tree.TypeDefinitionNode; +import io.ballerina.compiler.syntax.tree.VariableDeclarationNode; +import io.ballerina.projects.plugins.AnalysisTask; +import io.ballerina.projects.plugins.SyntaxNodeAnalysisContext; +import io.ballerina.tools.diagnostics.Diagnostic; +import io.ballerina.tools.diagnostics.DiagnosticFactory; +import io.ballerina.tools.diagnostics.DiagnosticInfo; +import io.ballerina.tools.diagnostics.DiagnosticSeverity; +import io.ballerina.tools.diagnostics.Location; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +/** + * Jsondata Record Field Validator. 
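+ *
+ * Validates the expected types used with {@code parseString}, {@code parseBytes} and
+ * {@code parseStream}, reporting unions with multiple complex member types and record
+ * fields that resolve to duplicate names through the {@code Name} annotation.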
+ * + * @since 0.1.0 + */ +public class JsondataTypeValidator implements AnalysisTask { + + private SemanticModel semanticModel; + private final HashMap allDiagnosticInfo = new HashMap<>(); + Location currentLocation; + + @Override + public void perform(SyntaxNodeAnalysisContext ctx) { + semanticModel = ctx.semanticModel(); + List diagnostics = semanticModel.diagnostics(); + boolean erroneousCompilation = diagnostics.stream() + .anyMatch(d -> d.diagnosticInfo().severity().equals(DiagnosticSeverity.ERROR)); + if (erroneousCompilation) { + return; + } + + ModulePartNode rootNode = (ModulePartNode) ctx.node(); + for (ModuleMemberDeclarationNode member : rootNode.members()) { + switch (member.kind()) { + case FUNCTION_DEFINITION -> processFunctionDefinitionNode((FunctionDefinitionNode) member, ctx); + case MODULE_VAR_DECL -> + processModuleVariableDeclarationNode((ModuleVariableDeclarationNode) member, ctx); + case TYPE_DEFINITION -> + processTypeDefinitionNode((TypeDefinitionNode) member, ctx); + } + } + } + + private void processFunctionDefinitionNode(FunctionDefinitionNode functionDefinitionNode, + SyntaxNodeAnalysisContext ctx) { + ChildNodeList childNodeList = functionDefinitionNode.functionBody().children(); + for (Node node : childNodeList) { + if (node.kind() != SyntaxKind.LOCAL_VAR_DECL) { + continue; + } + VariableDeclarationNode variableDeclarationNode = (VariableDeclarationNode) node; + Optional initializer = variableDeclarationNode.initializer(); + if (initializer.isEmpty()) { + continue; + } + + currentLocation = variableDeclarationNode.typedBindingPattern().typeDescriptor().location(); + Optional symbol = semanticModel.symbol(variableDeclarationNode.typedBindingPattern()); + if (symbol.isEmpty()) { + continue; + } + + TypeSymbol typeSymbol = ((VariableSymbol) symbol.get()).typeDescriptor(); + if (!isParseFunctionOfStringSource(initializer.get())) { + if (typeSymbol.typeKind() == TypeDescKind.RECORD) { + detectDuplicateFields((RecordTypeSymbol) typeSymbol, ctx); + } + continue; + } + + validateExpectedType(typeSymbol, ctx); + } + } + + private boolean isParseFunctionOfStringSource(ExpressionNode expressionNode) { + if (expressionNode.kind() == SyntaxKind.CHECK_EXPRESSION) { + expressionNode = ((CheckExpressionNode) expressionNode).expression(); + } + + if (expressionNode.kind() != SyntaxKind.FUNCTION_CALL) { + return false; + } + String functionName = ((FunctionCallExpressionNode) expressionNode).functionName().toString().trim(); + return functionName.contains(Constants.PARSE_STRING) || functionName.contains(Constants.PARSE_BYTES) + || functionName.contains(Constants.PARSE_STREAM); + } + + private void validateExpectedType(TypeSymbol typeSymbol, SyntaxNodeAnalysisContext ctx) { + typeSymbol.getLocation().ifPresent(location -> currentLocation = location); + switch (typeSymbol.typeKind()) { + case UNION -> validateUnionType((UnionTypeSymbol) typeSymbol, typeSymbol.getLocation(), ctx); + case RECORD -> validateRecordType((RecordTypeSymbol) typeSymbol, ctx); + case ARRAY -> validateExpectedType(((ArrayTypeSymbol) typeSymbol).memberTypeDescriptor(), ctx); + case TUPLE -> validateTupleType((TupleTypeSymbol) typeSymbol, ctx); + case TYPE_REFERENCE -> validateExpectedType(((TypeReferenceTypeSymbol) typeSymbol).typeDescriptor(), ctx); + } + } + + private void validateTupleType(TupleTypeSymbol tupleTypeSymbol, SyntaxNodeAnalysisContext ctx) { + for (TypeSymbol memberType : tupleTypeSymbol.memberTypeDescriptors()) { + validateExpectedType(memberType, ctx); + } + } + + private void 
+    private void validateRecordType(RecordTypeSymbol recordTypeSymbol, SyntaxNodeAnalysisContext ctx) {
+        detectDuplicateFields(recordTypeSymbol, ctx);
+
+        for (Map.Entry<String, RecordFieldSymbol> entry : recordTypeSymbol.fieldDescriptors().entrySet()) {
+            RecordFieldSymbol fieldSymbol = entry.getValue();
+            validateRecordFieldType(fieldSymbol.typeDescriptor(), fieldSymbol.getLocation(), ctx);
+        }
+    }
+
+    private void validateRecordFieldType(TypeSymbol typeSymbol, Optional<Location> location,
+                                         SyntaxNodeAnalysisContext ctx) {
+        switch (typeSymbol.typeKind()) {
+            case UNION -> validateUnionType((UnionTypeSymbol) typeSymbol, location, ctx);
+            case ARRAY -> validateRecordFieldType(((ArrayTypeSymbol) typeSymbol).memberTypeDescriptor(), location, ctx);
+            case TYPE_REFERENCE ->
+                    validateRecordFieldType(((TypeReferenceTypeSymbol) typeSymbol).typeDescriptor(), location, ctx);
+        }
+    }
+
+    private void validateUnionType(UnionTypeSymbol unionTypeSymbol, Optional<Location> location,
+                                   SyntaxNodeAnalysisContext ctx) {
+        int nonPrimitiveMemberCount = 0;
+        List<TypeSymbol> memberTypeSymbols = unionTypeSymbol.memberTypeDescriptors();
+        for (TypeSymbol memberTypeSymbol : memberTypeSymbols) {
+            if (isSupportedUnionMemberType(memberTypeSymbol)) {
+                continue;
+            }
+            nonPrimitiveMemberCount++;
+        }
+
+        if (nonPrimitiveMemberCount >= 1) {
+            reportDiagnosticInfo(ctx, location, JsondataDiagnosticCodes.UNSUPPORTED_UNION_TYPE);
+        }
+    }
+
+    private boolean isSupportedUnionMemberType(TypeSymbol typeSymbol) {
+        TypeDescKind kind = typeSymbol.typeKind();
+        if (kind == TypeDescKind.TYPE_REFERENCE) {
+            kind = ((TypeReferenceTypeSymbol) typeSymbol).typeDescriptor().typeKind();
+        }
+
+        switch (kind) {
+            case INT, FLOAT, DECIMAL, STRING, BOOLEAN, BYTE, NIL, SINGLETON, ERROR -> {
+                return true;
+            }
+            default -> {
+                return false;
+            }
+        }
+    }
+
+    private void reportDiagnosticInfo(SyntaxNodeAnalysisContext ctx, Optional<Location> location,
+                                      JsondataDiagnosticCodes diagnosticsCodes) {
+        Location pos = location.orElseGet(() -> currentLocation);
+        DiagnosticInfo diagnosticInfo = new DiagnosticInfo(diagnosticsCodes.getCode(),
+                diagnosticsCodes.getMessage(), diagnosticsCodes.getSeverity());
+        if (allDiagnosticInfo.containsKey(pos) && allDiagnosticInfo.get(pos).equals(diagnosticInfo)) {
+            return;
+        }
+        allDiagnosticInfo.put(pos, diagnosticInfo);
+        ctx.reportDiagnostic(DiagnosticFactory.createDiagnostic(diagnosticInfo, pos));
+    }
+
+    private void processModuleVariableDeclarationNode(ModuleVariableDeclarationNode moduleVariableDeclarationNode,
+                                                      SyntaxNodeAnalysisContext ctx) {
+        Optional<ExpressionNode> initializer = moduleVariableDeclarationNode.initializer();
+        if (initializer.isEmpty() || !isParseFunctionOfStringSource(initializer.get())) {
+            return;
+        }
+
+        Optional<Symbol> symbol = semanticModel.symbol(moduleVariableDeclarationNode.typedBindingPattern());
+        if (symbol.isEmpty()) {
+            return;
+        }
+        validateExpectedType(((VariableSymbol) symbol.get()).typeDescriptor(), ctx);
+    }
+
+    private void processTypeDefinitionNode(TypeDefinitionNode typeDefinitionNode, SyntaxNodeAnalysisContext ctx) {
+        Node typeDescriptor = typeDefinitionNode.typeDescriptor();
+        if (typeDescriptor.kind() != SyntaxKind.RECORD_TYPE_DESC) {
+            return;
+        }
+        validateRecordTypeDefinition(typeDefinitionNode, ctx);
+    }
+
+    private void validateRecordTypeDefinition(TypeDefinitionNode typeDefinitionNode, SyntaxNodeAnalysisContext ctx) {
+        Optional<Symbol> symbol = semanticModel.symbol(typeDefinitionNode);
+        if (symbol.isEmpty()) {
+            return;
+        }
+        TypeDefinitionSymbol typeDefinitionSymbol = (TypeDefinitionSymbol) symbol.get();
+        detectDuplicateFields((RecordTypeSymbol) typeDefinitionSymbol.typeDescriptor(), ctx);
+    }
+
+    private void detectDuplicateFields(RecordTypeSymbol recordTypeSymbol, SyntaxNodeAnalysisContext ctx) {
+        List<String> fieldMembers = new ArrayList<>();
+        for (Map.Entry<String, RecordFieldSymbol> entry : recordTypeSymbol.fieldDescriptors().entrySet()) {
+            RecordFieldSymbol fieldSymbol = entry.getValue();
+            String name = getNameFromAnnotation(entry.getKey(), fieldSymbol.annotAttachments());
+            if (fieldMembers.contains(name)) {
+                reportDiagnosticInfo(ctx, fieldSymbol.getLocation(), JsondataDiagnosticCodes.DUPLICATE_FIELD);
+                return;
+            }
+            fieldMembers.add(name);
+        }
+    }
+
+    private String getNameFromAnnotation(String fieldName,
+                                         List<AnnotationAttachmentSymbol> annotationAttachments) {
+        for (AnnotationAttachmentSymbol annotAttSymbol : annotationAttachments) {
+            AnnotationSymbol annotation = annotAttSymbol.typeDescriptor();
+            if (!getAnnotModuleName(annotation).contains(Constants.JSONDATA)) {
+                continue;
+            }
+            Optional<String> nameAnnot = annotation.getName();
+            if (nameAnnot.isEmpty()) {
+                continue;
+            }
+            String value = nameAnnot.get();
+            if (value.equals(Constants.NAME)) {
+                return ((LinkedHashMap<String, Object>) annotAttSymbol.attachmentValue().orElseThrow().value())
+                        .get("value").toString();
+            }
+        }
+        return fieldName;
+    }
+
+    private String getAnnotModuleName(AnnotationSymbol annotation) {
+        Optional<ModuleSymbol> moduleSymbol = annotation.getModule();
+        if (moduleSymbol.isEmpty()) {
+            return "";
+        }
+        Optional<String> moduleName = moduleSymbol.get().getName();
+        return moduleName.orElse("");
+    }
+}
diff --git a/compiler-plugin/src/main/java/module-info.java b/compiler-plugin/src/main/java/module-info.java
new file mode 100644
index 0000000..aa99b17
--- /dev/null
+++ b/compiler-plugin/src/main/java/module-info.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2024, WSO2 LLC. (http://www.wso2.com).
+ *
+ * WSO2 LLC. licenses this file to you under the Apache License,
+ * Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +module io.ballerina.stdlib.jsondata.compiler { + requires io.ballerina.lang; + requires io.ballerina.tools.api; + requires io.ballerina.parser; +} diff --git a/gradle.properties b/gradle.properties new file mode 100644 index 0000000..b37a623 --- /dev/null +++ b/gradle.properties @@ -0,0 +1,15 @@ +org.gradle.caching=true +group=io.ballerina.stdlib +version=0.1.0-SNAPSHOT +ballerinaLangVersion=2201.8.4 + +checkstyleToolVersion=10.12.0 +puppycrawlCheckstyleVersion=10.12.0 +testngVersion=7.6.1 +slf4jVersion=2.0.7 +githubSpotbugsVersion=5.0.14 +githubJohnrengelmanShadowVersion=8.1.1 +underCouchDownloadVersion=4.0.4 +researchgateReleaseVersion=2.8.0 +ballerinaGradlePluginVersion=2.0.1 +stdlibIoVersion=1.6.0 diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000..033e24c Binary files /dev/null and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..9f4197d --- /dev/null +++ b/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,7 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-8.2.1-bin.zip +networkTimeout=10000 +validateDistributionUrl=true +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew new file mode 100755 index 0000000..fcb6fca --- /dev/null +++ b/gradlew @@ -0,0 +1,248 @@ +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. 
+# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# +############################################################################## + +# Attempt to set APP_HOME + +# Resolve links: $0 may be a link +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +# This is normally unused +# shellcheck disable=SC2034 +APP_BASE_NAME=${0##*/} +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + if ! command -v java >/dev/null 2>&1 + then + die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
+ +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat new file mode 100644 index 0000000..93e3f59 --- /dev/null +++ b/gradlew.bat @@ -0,0 +1,92 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. 
+@rem + +@if "%DEBUG%"=="" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if %ERRORLEVEL% equ 0 goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if %ERRORLEVEL% equ 0 goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/native/build.gradle b/native/build.gradle new file mode 100644 index 0000000..9a7ca44 --- /dev/null +++ b/native/build.gradle @@ -0,0 +1,77 @@ +/** + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +plugins { + id 'java' + id 'checkstyle' + id 'com.github.spotbugs' +} + +description = 'Ballerina - Data.Json Java Utils' + +dependencies { + implementation 'junit:junit:4.13.1' + checkstyle project(':checkstyle') + checkstyle "com.puppycrawl.tools:checkstyle:${puppycrawlCheckstyleVersion}" + implementation 'org.apache.commons:commons-lang3:3.6' + + implementation group: 'org.ballerinalang', name: 'ballerina-lang', version: "${ballerinaLangVersion}" + implementation group: 'org.ballerinalang', name: 'ballerina-runtime', version: "${ballerinaLangVersion}" + implementation group: 'org.ballerinalang', name: 'value', version: "${ballerinaLangVersion}" +} + +checkstyle { + toolVersion "${checkstyleToolVersion}" + configFile rootProject.file("build-config/checkstyle/build/checkstyle.xml") + configProperties = ["suppressionFile" : file("${rootDir}/build-config/checkstyle/build/suppressions.xml")] +} + +checkstyleMain.dependsOn(":checkstyle:downloadCheckstyleRuleFiles") + +def excludePattern = '**/module-info.java' +tasks.withType(Checkstyle) { + exclude excludePattern +} + +spotbugsMain { + enabled=false + effort "max" + reportLevel "low" + reportsDir = file("$project.buildDir/reports/spotbugs") + reports { + html.enabled true + text.enabled = true + } + def excludeFile = file("${rootDir}/spotbugs-exclude.xml") + if(excludeFile.exists()) { + excludeFilter = excludeFile + } +} + +spotbugsTest { + enabled = false +} + +compileJava { + doFirst { + options.compilerArgs = [ + '--module-path', classpath.asPath, + ] + classpath = files() + } +} diff --git a/native/src/main/java/io/ballerina/lib/data/jsondata/FromString.java b/native/src/main/java/io/ballerina/lib/data/jsondata/FromString.java new file mode 100644 index 0000000..b146468 --- /dev/null +++ b/native/src/main/java/io/ballerina/lib/data/jsondata/FromString.java @@ -0,0 +1,329 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.ballerina.lib.data.jsondata; + +import io.ballerina.lib.data.jsondata.utils.DiagnosticErrorCode; +import io.ballerina.lib.data.jsondata.utils.DiagnosticLog; +import io.ballerina.runtime.api.PredefinedTypes; +import io.ballerina.runtime.api.TypeTags; +import io.ballerina.runtime.api.creators.TypeCreator; +import io.ballerina.runtime.api.creators.ValueCreator; +import io.ballerina.runtime.api.types.FiniteType; +import io.ballerina.runtime.api.types.IntersectionType; +import io.ballerina.runtime.api.types.ReferenceType; +import io.ballerina.runtime.api.types.Type; +import io.ballerina.runtime.api.types.UnionType; +import io.ballerina.runtime.api.utils.StringUtils; +import io.ballerina.runtime.api.utils.TypeUtils; +import io.ballerina.runtime.api.values.BDecimal; +import io.ballerina.runtime.api.values.BError; +import io.ballerina.runtime.api.values.BString; +import io.ballerina.runtime.api.values.BTypedesc; + +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; + +/** + * Native implementation of data:fromStringWithType(string). + * + * @since 0.1.0 + */ +public class FromString { + + private static final List TYPE_PRIORITY_ORDER = List.of( + TypeTags.INT_TAG, + TypeTags.FLOAT_TAG, + TypeTags.DECIMAL_TAG, + TypeTags.NULL_TAG, + TypeTags.BOOLEAN_TAG, + TypeTags.JSON_TAG, + TypeTags.STRING_TAG + ); + + private static final List BASIC_JSON_MEMBER_TYPES = List.of( + PredefinedTypes.TYPE_NULL, + PredefinedTypes.TYPE_BOOLEAN, + PredefinedTypes.TYPE_INT, + PredefinedTypes.TYPE_FLOAT, + PredefinedTypes.TYPE_DECIMAL, + PredefinedTypes.TYPE_STRING + ); + private static final UnionType JSON_TYPE_WITH_BASIC_TYPES = TypeCreator.createUnionType(BASIC_JSON_MEMBER_TYPES); + public static final Integer BBYTE_MIN_VALUE = 0; + public static final Integer BBYTE_MAX_VALUE = 255; + public static final Integer SIGNED32_MAX_VALUE = 2147483647; + public static final Integer SIGNED32_MIN_VALUE = -2147483648; + public static final Integer SIGNED16_MAX_VALUE = 32767; + public static final Integer SIGNED16_MIN_VALUE = -32768; + public static final Integer SIGNED8_MAX_VALUE = 127; + public static final Integer SIGNED8_MIN_VALUE = -128; + public static final Long UNSIGNED32_MAX_VALUE = 4294967295L; + public static final Integer UNSIGNED16_MAX_VALUE = 65535; + public static final Integer UNSIGNED8_MAX_VALUE = 255; + + public static Object fromStringWithType(BString string, BTypedesc typed) { + Type expType = typed.getDescribingType(); + + try { + return fromStringWithType(string, expType); + } catch (NumberFormatException e) { + return returnError(string.getValue(), expType.toString()); + } + } + + public static Object fromStringWithType(BString string, Type expType) { + String value = string.getValue(); + try { + switch (expType.getTag()) { + case TypeTags.INT_TAG: + return stringToInt(value); + case TypeTags.BYTE_TAG: + return stringToByte(value); + case TypeTags.SIGNED8_INT_TAG: + return stringToSigned8Int(value); + case TypeTags.SIGNED16_INT_TAG: + return stringToSigned16Int(value); + case TypeTags.SIGNED32_INT_TAG: + return stringToSigned32Int(value); + case TypeTags.UNSIGNED8_INT_TAG: + return stringToUnsigned8Int(value); + case TypeTags.UNSIGNED16_INT_TAG: + return stringToUnsigned16Int(value); + case TypeTags.UNSIGNED32_INT_TAG: + return stringToUnsigned32Int(value); + case TypeTags.FLOAT_TAG: + return stringToFloat(value); + case TypeTags.DECIMAL_TAG: + return stringToDecimal(value); + case TypeTags.CHAR_STRING_TAG: + return stringToChar(value); + case 
TypeTags.STRING_TAG: + return string; + case TypeTags.BOOLEAN_TAG: + return stringToBoolean(value); + case TypeTags.NULL_TAG: + return stringToNull(value); + case TypeTags.FINITE_TYPE_TAG: + return stringToFiniteType(value, (FiniteType) expType); + case TypeTags.UNION_TAG: + return stringToUnion(string, (UnionType) expType); + case TypeTags.JSON_TAG: + return stringToUnion(string, JSON_TYPE_WITH_BASIC_TYPES); + case TypeTags.TYPE_REFERENCED_TYPE_TAG: + return fromStringWithType(string, ((ReferenceType) expType).getReferredType()); + case TypeTags.INTERSECTION_TAG: + return fromStringWithType(string, ((IntersectionType) expType).getEffectiveType()); + default: + return returnError(value, expType.toString()); + } + } catch (NumberFormatException e) { + return returnError(value, expType.toString()); + } + } + + private static Object stringToFiniteType(String value, FiniteType finiteType) { + return finiteType.getValueSpace().stream() + .filter(finiteValue -> !(convertToSingletonValue(value, finiteValue) instanceof BError)) + .findFirst() + .orElseGet(() -> returnError(value, finiteType.toString())); + } + + private static Object convertToSingletonValue(String str, Object singletonValue) { + if (str.equals(singletonValue.toString())) { + return fromStringWithType(StringUtils.fromString(str), TypeUtils.getType(singletonValue)); + } else { + return returnError(str, singletonValue.toString()); + } + } + + private static Long stringToInt(String value) throws NumberFormatException { + return Long.parseLong(value); + } + + private static int stringToByte(String value) throws NumberFormatException { + int intValue = Integer.parseInt(value); + if (!isByteLiteral(intValue)) { + throw DiagnosticLog.error(DiagnosticErrorCode.INCOMPATIBLE_TYPE, PredefinedTypes.TYPE_BYTE, value); + } + return intValue; + } + + private static long stringToSigned8Int(String value) throws NumberFormatException { + long intValue = Long.parseLong(value); + if (!isSigned8LiteralValue(intValue)) { + throw DiagnosticLog.error(DiagnosticErrorCode.INCOMPATIBLE_TYPE, PredefinedTypes.TYPE_INT_SIGNED_8, value); + } + return intValue; + } + + private static long stringToSigned16Int(String value) throws NumberFormatException { + long intValue = Long.parseLong(value); + if (!isSigned16LiteralValue(intValue)) { + throw DiagnosticLog.error(DiagnosticErrorCode.INCOMPATIBLE_TYPE, PredefinedTypes.TYPE_INT_SIGNED_16, value); + } + return intValue; + } + + private static long stringToSigned32Int(String value) throws NumberFormatException { + long intValue = Long.parseLong(value); + if (!isSigned32LiteralValue(intValue)) { + throw DiagnosticLog.error(DiagnosticErrorCode.INCOMPATIBLE_TYPE, PredefinedTypes.TYPE_INT_SIGNED_32, value); + } + return intValue; + } + + private static long stringToUnsigned8Int(String value) throws NumberFormatException { + long intValue = Long.parseLong(value); + if (!isUnsigned8LiteralValue(intValue)) { + throw DiagnosticLog.error(DiagnosticErrorCode.INCOMPATIBLE_TYPE, + PredefinedTypes.TYPE_INT_UNSIGNED_8, value); + } + return intValue; + } + + private static long stringToUnsigned16Int(String value) throws NumberFormatException { + long intValue = Long.parseLong(value); + if (!isUnsigned16LiteralValue(intValue)) { + throw DiagnosticLog.error(DiagnosticErrorCode.INCOMPATIBLE_TYPE, + PredefinedTypes.TYPE_INT_UNSIGNED_16, value); + } + return intValue; + } + + private static long stringToUnsigned32Int(String value) throws NumberFormatException { + long intValue = Long.parseLong(value); + if 
(!isUnsigned32LiteralValue(intValue)) { + throw DiagnosticLog.error(DiagnosticErrorCode.INCOMPATIBLE_TYPE, + PredefinedTypes.TYPE_INT_UNSIGNED_32, value); + } + return intValue; + } + + private static BString stringToChar(String value) throws NumberFormatException { + if (!isCharLiteralValue(value)) { + throw DiagnosticLog.error(DiagnosticErrorCode.INCOMPATIBLE_TYPE, + PredefinedTypes.TYPE_STRING_CHAR, value); + } + return StringUtils.fromString(value); + } + + private static Double stringToFloat(String value) throws NumberFormatException { + if (hasFloatOrDecimalLiteralSuffix(value)) { + throw new NumberFormatException(); + } + return Double.parseDouble(value); + } + + private static BDecimal stringToDecimal(String value) throws NumberFormatException { + return ValueCreator.createDecimalValue(value); + } + + private static Object stringToBoolean(String value) throws NumberFormatException { + if ("true".equalsIgnoreCase(value) || "1".equalsIgnoreCase(value)) { + return true; + } + + if ("false".equalsIgnoreCase(value) || "0".equalsIgnoreCase(value)) { + return false; + } + return returnError(value, "boolean"); + } + + private static Object stringToNull(String value) throws NumberFormatException { + if ("null".equalsIgnoreCase(value) || "()".equalsIgnoreCase(value)) { + return null; + } + return returnError(value, "()"); + } + + private static Object stringToUnion(BString string, UnionType expType) throws NumberFormatException { + List memberTypes = new ArrayList<>(expType.getMemberTypes()); + memberTypes.sort(Comparator.comparingInt(t -> { + int index = TYPE_PRIORITY_ORDER.indexOf(TypeUtils.getReferredType(t).getTag()); + return index == -1 ? Integer.MAX_VALUE : index; + })); + for (Type memberType : memberTypes) { + try { + Object result = fromStringWithType(string, memberType); + if (result instanceof BError) { + continue; + } + return result; + } catch (Exception e) { + // Skip + } + } + return returnError(string.getValue(), expType.toString()); + } + + private static boolean hasFloatOrDecimalLiteralSuffix(String value) { + int length = value.length(); + if (length == 0) { + return false; + } + + switch (value.charAt(length - 1)) { + case 'F': + case 'f': + case 'D': + case 'd': + return true; + default: + return false; + } + } + + private static boolean isByteLiteral(long longValue) { + return (longValue >= BBYTE_MIN_VALUE && longValue <= BBYTE_MAX_VALUE); + } + + private static boolean isSigned32LiteralValue(Long longObject) { + return (longObject >= SIGNED32_MIN_VALUE && longObject <= SIGNED32_MAX_VALUE); + } + + private static boolean isSigned16LiteralValue(Long longObject) { + return (longObject.intValue() >= SIGNED16_MIN_VALUE && longObject.intValue() <= SIGNED16_MAX_VALUE); + } + + private static boolean isSigned8LiteralValue(Long longObject) { + return (longObject.intValue() >= SIGNED8_MIN_VALUE && longObject.intValue() <= SIGNED8_MAX_VALUE); + } + + private static boolean isUnsigned32LiteralValue(Long longObject) { + return (longObject >= 0 && longObject <= UNSIGNED32_MAX_VALUE); + } + + private static boolean isUnsigned16LiteralValue(Long longObject) { + return (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED16_MAX_VALUE); + } + + private static boolean isUnsigned8LiteralValue(Long longObject) { + return (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED8_MAX_VALUE); + } + + private static boolean isCharLiteralValue(String value) { + return value.codePoints().count() == 1; + } + + private static BError returnError(String string, String expType) { 
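+        // Builds the standard conversion failure: the given string value cannot be converted to the expected type.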
+ return DiagnosticLog.error(DiagnosticErrorCode.CANNOT_CONVERT_TO_EXPECTED_TYPE, + PredefinedTypes.TYPE_STRING.getName(), string, expType); + } +} diff --git a/native/src/main/java/io/ballerina/lib/data/jsondata/io/BallerinaByteBlockInputStream.java b/native/src/main/java/io/ballerina/lib/data/jsondata/io/BallerinaByteBlockInputStream.java new file mode 100644 index 0000000..72cbbde --- /dev/null +++ b/native/src/main/java/io/ballerina/lib/data/jsondata/io/BallerinaByteBlockInputStream.java @@ -0,0 +1,167 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.ballerina.lib.data.jsondata.io; + +import io.ballerina.lib.data.jsondata.utils.DiagnosticLog; +import io.ballerina.runtime.api.Environment; +import io.ballerina.runtime.api.async.Callback; +import io.ballerina.runtime.api.async.StrandMetadata; +import io.ballerina.runtime.api.types.MethodType; +import io.ballerina.runtime.api.types.Type; +import io.ballerina.runtime.api.values.BArray; +import io.ballerina.runtime.api.values.BError; +import io.ballerina.runtime.api.values.BMap; +import io.ballerina.runtime.api.values.BObject; +import io.ballerina.runtime.api.values.BString; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Arrays; +import java.util.Map; +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + +/** + * Java Input Stream based on Ballerina byte block stream. stream + * + * @since 0.1.0 + */ +public class BallerinaByteBlockInputStream extends InputStream { + + private final BObject iterator; + private final Environment env; + private final String nextMethodName; + private final Type returnType; + private final String strandName; + private final StrandMetadata metadata; + private final Map properties; + private final AtomicBoolean done = new AtomicBoolean(false); + private final MethodType closeMethod; + private final Consumer futureResultConsumer; + + private byte[] currentChunk = new byte[0]; + private int nextChunkIndex = 0; + + public BallerinaByteBlockInputStream(Environment env, BObject iterator, MethodType nextMethod, + MethodType closeMethod, Consumer futureResultConsumer) { + this.env = env; + this.iterator = iterator; + this.nextMethodName = nextMethod.getName(); + this.returnType = nextMethod.getReturnType(); + this.closeMethod = closeMethod; + this.strandName = env.getStrandName().orElse(""); + this.metadata = env.getStrandMetadata(); + this.properties = Map.of(); + this.futureResultConsumer = futureResultConsumer; + } + + @Override + public int read() { + if (done.get()) { + return -1; + } + if (hasBytesInCurrentChunk()) { + return currentChunk[nextChunkIndex++]; + } + // Need to get a new block from the stream, before reading again. 
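+        // readNextChunk() blocks on a semaphore until the iterator's next() call completes and
+        // refills currentChunk; read() is then retried against the freshly loaded chunk.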
+ nextChunkIndex = 0; + try { + if (readNextChunk()) { + return read(); + } + } catch (InterruptedException e) { + BError error = DiagnosticLog.getJsonError("Cannot read the stream, interrupted error"); + futureResultConsumer.accept(error); + return -1; + } + return -1; + } + + @Override + public void close() throws IOException { + super.close(); + Semaphore semaphore = new Semaphore(0); + if (closeMethod != null) { + env.getRuntime().invokeMethodAsyncSequentially(iterator, closeMethod.getName(), strandName, metadata, + new Callback() { + @Override + public void notifyFailure(BError bError) { + semaphore.release(); + } + + @Override + public void notifySuccess(Object result) { + semaphore.release(); + } + }, properties, returnType); + } + try { + semaphore.acquire(); + } catch (InterruptedException e) { + throw new IOException("Error while closing the stream", e); + } + } + + private boolean hasBytesInCurrentChunk() { + return currentChunk.length != 0 && nextChunkIndex < currentChunk.length; + } + + private boolean readNextChunk() throws InterruptedException { + Semaphore semaphore = new Semaphore(0); + Callback callback = new Callback() { + + @Override + public void notifyFailure(BError bError) { + // Panic with an error + done.set(true); + futureResultConsumer.accept(bError); + currentChunk = new byte[0]; + semaphore.release(); + // TODO : Should we panic here? + } + + @Override + public void notifySuccess(Object result) { + if (result == null) { + done.set(true); + currentChunk = new byte[0]; + semaphore.release(); + return; + } + if (result instanceof BMap) { + BMap valueRecord = (BMap) result; + final BString value = Arrays.stream(valueRecord.getKeys()).findFirst().get(); + final BArray arrayValue = valueRecord.getArrayValue(value); + currentChunk = arrayValue.getByteArray(); + semaphore.release(); + } else { + // Case where Completes with an error + done.set(true); + semaphore.release(); + } + } + + }; + env.getRuntime().invokeMethodAsyncSequentially(iterator, nextMethodName, strandName, metadata, callback, + properties, returnType); + semaphore.acquire(); + return !done.get(); + } +} diff --git a/native/src/main/java/io/ballerina/lib/data/jsondata/io/DataReaderTask.java b/native/src/main/java/io/ballerina/lib/data/jsondata/io/DataReaderTask.java new file mode 100644 index 0000000..f36bff2 --- /dev/null +++ b/native/src/main/java/io/ballerina/lib/data/jsondata/io/DataReaderTask.java @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package io.ballerina.lib.data.jsondata.io; + +import io.ballerina.lib.data.jsondata.json.JsonParser; +import io.ballerina.lib.data.jsondata.utils.DiagnosticLog; +import io.ballerina.runtime.api.Environment; +import io.ballerina.runtime.api.Future; +import io.ballerina.runtime.api.types.MethodType; +import io.ballerina.runtime.api.types.ObjectType; +import io.ballerina.runtime.api.utils.TypeUtils; +import io.ballerina.runtime.api.values.BMap; +import io.ballerina.runtime.api.values.BObject; +import io.ballerina.runtime.api.values.BString; +import io.ballerina.runtime.api.values.BTypedesc; + +import java.io.InputStreamReader; +import java.util.function.Consumer; + +/** + * This class will read data from a Ballerina Stream of byte blocks, in non-blocking manner. + * + * @since 0.1.0 + */ +public class DataReaderTask implements Runnable { + + private static final String METHOD_NAME_NEXT = "next"; + private static final String METHOD_NAME_CLOSE = "close"; + + private final Environment env; + private final BObject iteratorObj; + private final Future future; + private final BTypedesc typed; + private final BMap options; + + public DataReaderTask(Environment env, BObject iteratorObj, Future future, BTypedesc typed, BMap options) { + this.env = env; + this.iteratorObj = iteratorObj; + this.future = future; + this.typed = typed; + this.options = options; + } + + static MethodType resolveNextMethod(BObject iterator) { + MethodType method = getMethodType(iterator, METHOD_NAME_NEXT); + if (method != null) { + return method; + } + throw new IllegalStateException("next method not found in the iterator object"); + } + + static MethodType resolveCloseMethod(BObject iterator) { + return getMethodType(iterator, METHOD_NAME_CLOSE); + } + + private static MethodType getMethodType(BObject iterator, String methodNameClose) { + ObjectType objectType = (ObjectType) TypeUtils.getReferredType(iterator.getOriginalType()); + MethodType[] methods = objectType.getMethods(); + // Assumes compile-time validation of the iterator object + for (MethodType method : methods) { + if (method.getName().equals(methodNameClose)) { + return method; + } + } + return null; + } + + @Override + public void run() { + ResultConsumer resultConsumer = new ResultConsumer<>(future); + try (var byteBlockSteam = new BallerinaByteBlockInputStream(env, iteratorObj, resolveNextMethod(iteratorObj), + resolveCloseMethod(iteratorObj), resultConsumer)) { + Object result = JsonParser.parse(new InputStreamReader(byteBlockSteam), options, typed.getDescribingType()); + future.complete(result); + } catch (Exception e) { + future.complete(DiagnosticLog.getJsonError("Error occurred while reading the stream: " + e.getMessage())); + } + } + + /** + * This class will hold module related utility functions. + * + * @param The type of the result + * @param future The future to complete + * @since 0.1.0 + */ + public record ResultConsumer(Future future) implements Consumer { + + @Override + public void accept(T t) { + future.complete(t); + } + } +} diff --git a/native/src/main/java/io/ballerina/lib/data/jsondata/io/DataReaderThreadPool.java b/native/src/main/java/io/ballerina/lib/data/jsondata/io/DataReaderThreadPool.java new file mode 100644 index 0000000..631c3e9 --- /dev/null +++ b/native/src/main/java/io/ballerina/lib/data/jsondata/io/DataReaderThreadPool.java @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. 
licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package io.ballerina.lib.data.jsondata.io; + +import java.util.concurrent.ExecutorService; +import java.util.concurrent.SynchronousQueue; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; + +/** + * Thread pool for data reader. + * + * @since 0.1.0 + */ +public class DataReaderThreadPool { + + // TODO : Make this configurable, in Ballerina Library. + private static final int CORE_POOL_SIZE = 0; + private static final int MAX_POOL_SIZE = 50; + private static final long KEEP_ALIVE_TIME = 60L; + private static final String THREAD_NAME = "bal-data-jsondata-thread"; + public static final ExecutorService EXECUTOR_SERVICE = new ThreadPoolExecutor(CORE_POOL_SIZE, + MAX_POOL_SIZE, KEEP_ALIVE_TIME, TimeUnit.SECONDS, new SynchronousQueue<>(), new DataThreadFactory()); + + /** + * Thread factory for data reader. + * + * @since 0.1.0 + */ + static class DataThreadFactory implements ThreadFactory { + + @Override + public Thread newThread(Runnable runnable) { + Thread ballerinaData = new Thread(runnable); + ballerinaData.setName(THREAD_NAME); + return ballerinaData; + } + } +} diff --git a/native/src/main/java/io/ballerina/lib/data/jsondata/json/JsonCreator.java b/native/src/main/java/io/ballerina/lib/data/jsondata/json/JsonCreator.java new file mode 100644 index 0000000..ef2c677 --- /dev/null +++ b/native/src/main/java/io/ballerina/lib/data/jsondata/json/JsonCreator.java @@ -0,0 +1,308 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.ballerina.lib.data.jsondata.json; + +import io.ballerina.lib.data.jsondata.FromString; +import io.ballerina.lib.data.jsondata.utils.Constants; +import io.ballerina.lib.data.jsondata.utils.DiagnosticErrorCode; +import io.ballerina.lib.data.jsondata.utils.DiagnosticLog; +import io.ballerina.runtime.api.PredefinedTypes; +import io.ballerina.runtime.api.TypeTags; +import io.ballerina.runtime.api.creators.ValueCreator; +import io.ballerina.runtime.api.types.ArrayType; +import io.ballerina.runtime.api.types.Field; +import io.ballerina.runtime.api.types.IntersectionType; +import io.ballerina.runtime.api.types.MapType; +import io.ballerina.runtime.api.types.RecordType; +import io.ballerina.runtime.api.types.TupleType; +import io.ballerina.runtime.api.types.Type; +import io.ballerina.runtime.api.utils.StringUtils; +import io.ballerina.runtime.api.utils.TypeUtils; +import io.ballerina.runtime.api.values.BArray; +import io.ballerina.runtime.api.values.BError; +import io.ballerina.runtime.api.values.BMap; +import io.ballerina.runtime.api.values.BString; +import org.ballerinalang.langlib.value.CloneReadOnly; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Stack; + +/** + * Create objects for partially parsed json. + * + * @since 0.1.0 + */ +public class JsonCreator { + + static BMap initRootMapValue(Type expectedType) { + return switch (expectedType.getTag()) { + case TypeTags.RECORD_TYPE_TAG -> + ValueCreator.createRecordValue(expectedType.getPackage(), expectedType.getName()); + case TypeTags.MAP_TAG -> ValueCreator.createMapValue((MapType) expectedType); + case TypeTags.JSON_TAG -> ValueCreator.createMapValue(Constants.JSON_MAP_TYPE); + case TypeTags.ANYDATA_TAG -> ValueCreator.createMapValue(Constants.ANYDATA_MAP_TYPE); + case TypeTags.UNION_TAG -> throw DiagnosticLog.error(DiagnosticErrorCode.UNSUPPORTED_TYPE, expectedType); + default -> throw DiagnosticLog.error(DiagnosticErrorCode.INVALID_TYPE, expectedType, "map type"); + }; + } + + static BArray initArrayValue(Type expectedType) { + return switch (expectedType.getTag()) { + case TypeTags.TUPLE_TAG -> ValueCreator.createTupleValue((TupleType) expectedType); + case TypeTags.ARRAY_TAG -> ValueCreator.createArrayValue((ArrayType) expectedType); + case TypeTags.JSON_TAG -> ValueCreator.createArrayValue(PredefinedTypes.TYPE_JSON_ARRAY); + case TypeTags.ANYDATA_TAG -> ValueCreator.createArrayValue(PredefinedTypes.TYPE_ANYDATA_ARRAY); + default -> throw DiagnosticLog.error(DiagnosticErrorCode.INVALID_TYPE, expectedType, "list type"); + }; + } + + static Optional> initNewMapValue(JsonParser.StateMachine sm) { + JsonParser.StateMachine.ParserContext parentContext = sm.parserContexts.peek(); + sm.parserContexts.push(JsonParser.StateMachine.ParserContext.MAP); + Type expType = sm.expectedTypes.peek(); + if (expType == null) { + sm.fieldNameHierarchy.push(new Stack<>()); + return Optional.empty(); + } + + if (sm.currentJsonNode != null) { + sm.nodesStack.push(sm.currentJsonNode); + } + BMap nextMapValue = checkTypeAndCreateMappingValue(sm, expType, parentContext); + return Optional.of(nextMapValue); + } + + static BMap checkTypeAndCreateMappingValue(JsonParser.StateMachine sm, Type expType, + JsonParser.StateMachine.ParserContext parentContext) { + Type currentType = TypeUtils.getReferredType(expType); + BMap nextMapValue; + switch (currentType.getTag()) { + case TypeTags.RECORD_TYPE_TAG -> { + RecordType recordType = (RecordType) 
currentType; + nextMapValue = ValueCreator.createRecordValue(expType.getPackage(), expType.getName()); + sm.updateExpectedType(recordType.getFields(), recordType.getRestFieldType()); + } + case TypeTags.MAP_TAG -> { + nextMapValue = ValueCreator.createMapValue((MapType) currentType); + sm.updateExpectedType(new HashMap<>(), ((MapType) currentType).getConstrainedType()); + } + case TypeTags.JSON_TAG -> { + nextMapValue = ValueCreator.createMapValue(Constants.JSON_MAP_TYPE); + sm.updateExpectedType(new HashMap<>(), currentType); + } + case TypeTags.ANYDATA_TAG -> { + nextMapValue = ValueCreator.createMapValue(Constants.ANYDATA_MAP_TYPE); + sm.updateExpectedType(new HashMap<>(), currentType); + } + case TypeTags.INTERSECTION_TAG -> { + Optional mutableType = getMutableType((IntersectionType) currentType); + if (mutableType.isEmpty()) { + throw DiagnosticLog.error(DiagnosticErrorCode.INVALID_TYPE, currentType, "map type"); + } + return checkTypeAndCreateMappingValue(sm, mutableType.get(), parentContext); + } + case TypeTags.UNION_TAG -> throw DiagnosticLog.error(DiagnosticErrorCode.UNSUPPORTED_TYPE, currentType); + default -> { + if (parentContext == JsonParser.StateMachine.ParserContext.ARRAY) { + throw DiagnosticLog.error(DiagnosticErrorCode.INVALID_TYPE, currentType, "map type"); + } + throw DiagnosticLog.error(DiagnosticErrorCode.INVALID_TYPE_FOR_FIELD, getCurrentFieldPath(sm)); + } + } + return nextMapValue; + } + + static void updateNextMapValue(JsonParser.StateMachine sm) { + Optional> nextMap = initNewMapValue(sm); + if (nextMap.isPresent()) { + sm.currentJsonNode = nextMap.get(); + } else { + // This will restrict from checking the fieldHierarchy. + sm.jsonFieldDepth++; + } + } + + static Optional initNewArrayValue(JsonParser.StateMachine sm) { + sm.parserContexts.push(JsonParser.StateMachine.ParserContext.ARRAY); + if (sm.expectedTypes.peek() == null) { + return Optional.empty(); + } + + Object currentJsonNode = sm.currentJsonNode; + Type expType = TypeUtils.getReferredType(sm.expectedTypes.peek()); + if (expType.getTag() == TypeTags.INTERSECTION_TAG) { + Optional type = getMutableType((IntersectionType) expType); + if (type.isEmpty()) { + throw DiagnosticLog.error(DiagnosticErrorCode.INVALID_TYPE, expType, "array type"); + } + expType = type.get(); + } + BArray nextArrValue = initArrayValue(expType); + if (currentJsonNode == null) { + return Optional.ofNullable(nextArrValue); + } + + sm.nodesStack.push(currentJsonNode); + return Optional.ofNullable(nextArrValue); + } + + static Optional getMutableType(IntersectionType intersectionType) { + for (Type constituentType : intersectionType.getConstituentTypes()) { + if (constituentType.getTag() == TypeTags.READONLY_TAG) { + continue; + } + return Optional.of(constituentType); + } + return Optional.empty(); + } + + private static String getCurrentFieldPath(JsonParser.StateMachine sm) { + Iterator> itr = sm.fieldNameHierarchy.iterator(); + StringBuilder result = new StringBuilder(itr.hasNext() ? 
itr.next().peek() : ""); + while (itr.hasNext()) { + result.append(".").append(itr.next().peek()); + } + return result.toString(); + } + + static Object convertAndUpdateCurrentJsonNode(JsonParser.StateMachine sm, BString value, Type type) { + Object currentJson = sm.currentJsonNode; + Object convertedValue = convertToExpectedType(value, type); + if (convertedValue instanceof BError) { + if (sm.currentField != null) { + throw DiagnosticLog.error(DiagnosticErrorCode.INCOMPATIBLE_VALUE_FOR_FIELD, value, type, + getCurrentFieldPath(sm)); + } + throw DiagnosticLog.error(DiagnosticErrorCode.INCOMPATIBLE_TYPE, type, value); + } + + Type currentJsonNodeType = TypeUtils.getType(currentJson); + switch (currentJsonNodeType.getTag()) { + case TypeTags.MAP_TAG, TypeTags.RECORD_TYPE_TAG -> { + ((BMap) currentJson).put(StringUtils.fromString(sm.fieldNameHierarchy.peek().pop()), + convertedValue); + return currentJson; + } + case TypeTags.ARRAY_TAG -> { + // Handle projection in array. + ArrayType arrayType = (ArrayType) currentJsonNodeType; + if (arrayType.getState() == ArrayType.ArrayState.CLOSED && + arrayType.getSize() <= sm.arrayIndexes.peek()) { + return currentJson; + } + ((BArray) currentJson).add(sm.arrayIndexes.peek(), convertedValue); + return currentJson; + } + case TypeTags.TUPLE_TAG -> { + ((BArray) currentJson).add(sm.arrayIndexes.peek(), convertedValue); + return currentJson; + } + default -> { + return convertedValue; + } + } + } + + private static Object convertToExpectedType(BString value, Type type) { + if (type.getTag() == TypeTags.ANYDATA_TAG) { + return FromString.fromStringWithType(value, PredefinedTypes.TYPE_JSON); + } + return FromString.fromStringWithType(value, type); + } + + static void updateRecordFieldValue(BString fieldName, Object parent, Object currentJson) { + int typeTag = TypeUtils.getType(parent).getTag(); + if (typeTag == TypeTags.MAP_TAG || typeTag == TypeTags.RECORD_TYPE_TAG) { + ((BMap) parent).put(fieldName, currentJson); + } + } + + static Type getMemberType(Type expectedType, int index, boolean allowDataProjection) { + if (expectedType == null) { + return null; + } + + if (expectedType.getTag() == TypeTags.ARRAY_TAG) { + ArrayType arrayType = (ArrayType) expectedType; + if (arrayType.getState() == ArrayType.ArrayState.OPEN + || arrayType.getState() == ArrayType.ArrayState.CLOSED && index < arrayType.getSize()) { + return arrayType.getElementType(); + } + + if (!allowDataProjection) { + throw DiagnosticLog.error(DiagnosticErrorCode.ARRAY_SIZE_MISMATCH); + } + return null; + } else if (expectedType.getTag() == TypeTags.TUPLE_TAG) { + TupleType tupleType = (TupleType) expectedType; + List tupleTypes = tupleType.getTupleTypes(); + if (tupleTypes.size() < index + 1) { + Type restType = tupleType.getRestType(); + if (restType == null && !allowDataProjection) { + throw DiagnosticLog.error(DiagnosticErrorCode.ARRAY_SIZE_MISMATCH); + } + return restType; + } + return tupleTypes.get(index); + } + return expectedType; + } + + static Map getAllFieldsInRecord(RecordType recordType) { + BMap annotations = recordType.getAnnotations(); + Map modifiedNames = new HashMap<>(); + for (BString annotationKey : annotations.getKeys()) { + String keyStr = annotationKey.getValue(); + if (!keyStr.contains(Constants.FIELD)) { + continue; + } + String fieldName = keyStr.split(Constants.FIELD_REGEX)[1]; + Map fieldAnnotation = (Map) annotations.get(annotationKey); + modifiedNames.put(fieldName, getModifiedName(fieldAnnotation, fieldName)); + } + + Map fields = new HashMap<>(); + Map 
recordFields = recordType.getFields(); + for (String key : recordFields.keySet()) { + String fieldName = modifiedNames.getOrDefault(key, key); + if (fields.containsKey(fieldName)) { + throw DiagnosticLog.error(DiagnosticErrorCode.DUPLICATE_FIELD, fieldName); + } + fields.put(fieldName, recordFields.get(key)); + } + return fields; + } + + static String getModifiedName(Map fieldAnnotation, String fieldName) { + for (BString key : fieldAnnotation.keySet()) { + if (key.getValue().endsWith(Constants.NAME)) { + return ((Map) fieldAnnotation.get(key)).get(Constants.VALUE).toString(); + } + } + return fieldName; + } + + static Object constructReadOnlyValue(Object value) { + return CloneReadOnly.cloneReadOnly(value); + } +} diff --git a/native/src/main/java/io/ballerina/lib/data/jsondata/json/JsonParser.java b/native/src/main/java/io/ballerina/lib/data/jsondata/json/JsonParser.java new file mode 100644 index 0000000..f4e946d --- /dev/null +++ b/native/src/main/java/io/ballerina/lib/data/jsondata/json/JsonParser.java @@ -0,0 +1,1293 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.ballerina.lib.data.jsondata.json; + +import io.ballerina.lib.data.jsondata.utils.Constants; +import io.ballerina.lib.data.jsondata.utils.DiagnosticErrorCode; +import io.ballerina.lib.data.jsondata.utils.DiagnosticLog; +import io.ballerina.runtime.api.TypeTags; +import io.ballerina.runtime.api.creators.ErrorCreator; +import io.ballerina.runtime.api.flags.SymbolFlags; +import io.ballerina.runtime.api.types.ArrayType; +import io.ballerina.runtime.api.types.Field; +import io.ballerina.runtime.api.types.IntersectionType; +import io.ballerina.runtime.api.types.MapType; +import io.ballerina.runtime.api.types.RecordType; +import io.ballerina.runtime.api.types.Type; +import io.ballerina.runtime.api.types.UnionType; +import io.ballerina.runtime.api.utils.StringUtils; +import io.ballerina.runtime.api.utils.TypeUtils; +import io.ballerina.runtime.api.values.BArray; +import io.ballerina.runtime.api.values.BError; +import io.ballerina.runtime.api.values.BMap; +import io.ballerina.runtime.api.values.BString; +import org.apache.commons.lang3.StringEscapeUtils; +import org.ballerinalang.langlib.value.CloneReadOnly; + +import java.io.IOException; +import java.io.Reader; +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.Stack; + +/** + * This class converts string to Json with projection. + * + * @since 0.1.0 + */ +public class JsonParser { + + private static final ThreadLocal tlStateMachine = ThreadLocal.withInitial(StateMachine::new); + + /** + * Parses the contents in the given {@link Reader} and returns a json. 
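+     * The value is constructed to match the expected type; when data projection is allowed by the
+     * options, parts of the input that the expected type does not require are projected away.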
+ * + * @param reader reader which contains the JSON content + * @param options represent the options that can be used to modify the behaviour of conversion + * @return JSON structure + * @throws BError for any parsing error + */ + public static Object parse(Reader reader, BMap options, Type type) + throws BError { + StateMachine sm = tlStateMachine.get(); + try { + return sm.execute(reader, options, TypeUtils.getReferredType(type)); + } finally { + // Need to reset the state machine before leaving. Otherwise, references to the created + // JSON values will be maintained and the java GC will not happen properly. + sm.reset(); + } + } + + /** + * Represents a JSON parser related exception. + */ + public static class JsonParserException extends Exception { + public JsonParserException(String msg) { + super(msg); + } + } + + /** + * Represents the state machine used for JSON parsing. + */ + static class StateMachine { + + private static final char CR = 0x000D; + private static final char NEWLINE = 0x000A; + private static final char HZ_TAB = 0x0009; + private static final char SPACE = 0x0020; + private static final char BACKSPACE = 0x0008; + private static final char FORMFEED = 0x000C; + private static final char QUOTES = '"'; + private static final char REV_SOL = '\\'; + private static final char SOL = '/'; + private static final char EOF = (char) -1; + private static final State DOC_START_STATE = new DocumentStartState(); + private static final State DOC_END_STATE = new DocumentEndState(); + static final State FIRST_FIELD_READY_STATE = new FirstFieldReadyState(); + private static final State NON_FIRST_FIELD_READY_STATE = new NonFirstFieldReadyState(); + private static final State FIELD_NAME_STATE = new FieldNameState(); + private static final State END_FIELD_NAME_STATE = new EndFieldNameState(); + private static final State FIELD_VALUE_READY_STATE = new FieldValueReadyState(); + private static final State STRING_FIELD_VALUE_STATE = new StringFieldValueState(); + private static final State NON_STRING_FIELD_VALUE_STATE = new NonStringFieldValueState(); + private static final State NON_STRING_VALUE_STATE = new NonStringValueState(); + private static final State STRING_VALUE_STATE = new StringValueState(); + private static final State FIELD_END_STATE = new FieldEndState(); + private static final State STRING_AE_ESC_CHAR_PROCESSING_STATE = new StringAEEscapedCharacterProcessingState(); + private static final State STRING_AE_PROCESSING_STATE = new StringAEProcessingState(); + private static final State FIELD_NAME_UNICODE_HEX_PROCESSING_STATE = new FieldNameUnicodeHexProcessingState(); + static final State FIRST_ARRAY_ELEMENT_READY_STATE = new FirstArrayElementReadyState(); + private static final State NON_FIRST_ARRAY_ELEMENT_READY_STATE = new NonFirstArrayElementReadyState(); + private static final State STRING_ARRAY_ELEMENT_STATE = new StringArrayElementState(); + private static final State NON_STRING_ARRAY_ELEMENT_STATE = new NonStringArrayElementState(); + private static final State ARRAY_ELEMENT_END_STATE = new ArrayElementEndState(); + private static final State STRING_FIELD_ESC_CHAR_PROCESSING_STATE = + new StringFieldEscapedCharacterProcessingState(); + private static final State STRING_VAL_ESC_CHAR_PROCESSING_STATE = + new StringValueEscapedCharacterProcessingState(); + private static final State FIELD_NAME_ESC_CHAR_PROCESSING_STATE = + new FieldNameEscapedCharacterProcessingState(); + private static final State STRING_FIELD_UNICODE_HEX_PROCESSING_STATE = + new 
StringFieldUnicodeHexProcessingState(); + private static final State STRING_VALUE_UNICODE_HEX_PROCESSING_STATE = + new StringValueUnicodeHexProcessingState(); + + Object currentJsonNode; + Deque nodesStack; + private StringBuilder hexBuilder = new StringBuilder(4); + private char[] charBuff = new char[1024]; + private int charBuffIndex; + + private int index; + private int line; + private int column; + private char currentQuoteChar; + boolean allowDataProjection; + Field currentField; + Stack> fieldHierarchy = new Stack<>(); + Stack> visitedFieldHierarchy = new Stack<>(); + Stack restType = new Stack<>(); + Stack expectedTypes = new Stack<>(); + Stack> fieldNameHierarchy = new Stack<>(); + int jsonFieldDepth = 0; + Stack arrayIndexes = new Stack<>(); + Stack parserContexts = new Stack<>(); + + StateMachine() { + reset(); + } + + public void reset() { + index = 0; + currentJsonNode = null; + line = 1; + column = 0; + nodesStack = new ArrayDeque<>(); + fieldNameHierarchy.clear(); + fieldHierarchy.clear(); + currentField = null; + restType.clear(); + expectedTypes.clear(); + jsonFieldDepth = 0; + arrayIndexes.clear(); + } + + private static boolean isWhitespace(char ch) { + return ch == SPACE || ch == HZ_TAB || ch == NEWLINE || ch == CR; + } + + private static void throwExpected(String... chars) throws JsonParserException { + throw new JsonParserException("expected '" + String.join("' or '", chars) + "'"); + } + + private void processLocation(char ch) { + if (ch == '\n') { + this.line++; + this.column = 0; + } else { + this.column++; + } + } + + public Object execute(Reader reader, BMap options, Type type) throws BError { + switch (type.getTag()) { + // TODO: Handle readonly and singleton type as expType. + case TypeTags.RECORD_TYPE_TAG -> { + RecordType recordType = (RecordType) type; + expectedTypes.push(recordType); + updateExpectedType(JsonCreator.getAllFieldsInRecord(recordType), recordType.getRestFieldType()); + } + case TypeTags.ARRAY_TAG, TypeTags.TUPLE_TAG -> { + expectedTypes.push(type); + arrayIndexes.push(0); + } + case TypeTags.NULL_TAG, TypeTags.BOOLEAN_TAG, TypeTags.INT_TAG, TypeTags.BYTE_TAG, + TypeTags.SIGNED8_INT_TAG, TypeTags.SIGNED16_INT_TAG, TypeTags.SIGNED32_INT_TAG, + TypeTags.UNSIGNED8_INT_TAG, TypeTags.UNSIGNED16_INT_TAG, TypeTags.UNSIGNED32_INT_TAG, + TypeTags.FLOAT_TAG, TypeTags.DECIMAL_TAG, TypeTags.CHAR_STRING_TAG, TypeTags.STRING_TAG, + TypeTags.FINITE_TYPE_TAG -> + expectedTypes.push(type); + case TypeTags.JSON_TAG, TypeTags.ANYDATA_TAG -> { + expectedTypes.push(type); + updateExpectedType(new HashMap<>(), type); + } + case TypeTags.MAP_TAG -> { + expectedTypes.push(type); + updateExpectedType(new HashMap<>(), ((MapType) type).getConstrainedType()); + } + case TypeTags.UNION_TAG -> { + if (isSupportedUnionType((UnionType) type)) { + expectedTypes.push(type); + break; + } + throw DiagnosticLog.error(DiagnosticErrorCode.UNSUPPORTED_TYPE, type); + } + case TypeTags.INTERSECTION_TAG -> { + Type effectiveType = ((IntersectionType) type).getEffectiveType(); + if (!SymbolFlags.isFlagOn(SymbolFlags.READONLY, effectiveType.getFlags())) { + throw DiagnosticLog.error(DiagnosticErrorCode.UNSUPPORTED_TYPE, type); + } + + Object jsonValue = null; + for (Type constituentType : ((IntersectionType) type).getConstituentTypes()) { + if (constituentType.getTag() == TypeTags.READONLY_TAG) { + continue; + } + jsonValue = execute(reader, options, TypeUtils.getReferredType(constituentType)); + break; + } + return JsonCreator.constructReadOnlyValue(jsonValue); + } + default -> throw 
DiagnosticLog.error(DiagnosticErrorCode.UNSUPPORTED_TYPE, type); + } + + allowDataProjection = (boolean) options.get(Constants.ALLOW_DATA_PROJECTION); + + State currentState = DOC_START_STATE; + try { + char[] buff = new char[1024]; + int count; + while ((count = reader.read(buff)) > 0) { + this.index = 0; + while (this.index < count) { + currentState = currentState.transition(this, buff, this.index, count); + } + } + currentState = currentState.transition(this, new char[] { EOF }, 0, 1); + if (currentState != DOC_END_STATE) { + throw ErrorCreator.createError(StringUtils.fromString("invalid JSON document")); + } + return currentJsonNode; + } catch (IOException e) { + throw DiagnosticLog.error(DiagnosticErrorCode.JSON_READER_FAILURE, e.getMessage()); + } catch (JsonParserException e) { + throw DiagnosticLog.error(DiagnosticErrorCode.JSON_PARSER_EXCEPTION, e.getMessage(), line, column); + } + } + + private boolean isSupportedUnionType(UnionType type) { + for (Type memberType : type.getMemberTypes()) { + switch (memberType.getTag()) { + case TypeTags.RECORD_TYPE_TAG, TypeTags.OBJECT_TYPE_TAG, TypeTags.MAP_TAG, TypeTags.JSON_TAG, + TypeTags.ANYDATA_TAG -> { + return false; + } + case TypeTags.UNION_TAG -> { + return !isSupportedUnionType(type); + } + } + } + return true; + } + + private void append(char ch) { + try { + this.charBuff[this.charBuffIndex] = ch; + this.charBuffIndex++; + } catch (ArrayIndexOutOfBoundsException e) { + /* this approach is faster than checking for the size by ourself */ + this.growCharBuff(); + this.charBuff[this.charBuffIndex++] = ch; + } + } + + private void growCharBuff() { + char[] newBuff = new char[charBuff.length * 2]; + System.arraycopy(this.charBuff, 0, newBuff, 0, this.charBuff.length); + this.charBuff = newBuff; + } + + private State finalizeNonArrayObjectAndRemoveExpectedType() { + State state = finalizeNonArrayObject(); + expectedTypes.pop(); + return state; + } + + private State finalizeNonArrayObject() { + if (jsonFieldDepth > 0) { + jsonFieldDepth--; + } + + if (!expectedTypes.isEmpty() && expectedTypes.peek() == null) { + // Skip the value and continue to next state. + parserContexts.pop(); + fieldNameHierarchy.pop(); + if (parserContexts.peek() == ParserContext.MAP) { + return FIELD_END_STATE; + } + return ARRAY_ELEMENT_END_STATE; + } + + Map remainingFields = fieldHierarchy.pop(); + visitedFieldHierarchy.pop(); + fieldNameHierarchy.pop(); + restType.pop(); + for (Field field : remainingFields.values()) { + if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED)) { + throw DiagnosticLog.error(DiagnosticErrorCode.REQUIRED_FIELD_NOT_PRESENT, field.getFieldName()); + } + } + return finalizeObject(); + } + + private State finalizeObject() { + // Skip the value and continue to next state. 
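+            // (The skip applies when the expected type at this level is null, i.e. the value is being projected away.)
+            // Otherwise the finished value in currentJsonNode is attached to the parent popped from nodesStack:
+            // by field name for maps/records, by the current index for arrays/tuples (respecting a closed array's size),
+            // after which the parent becomes currentJsonNode again and the next state is chosen.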
+ parserContexts.pop(); + + if (!expectedTypes.isEmpty() && expectedTypes.peek() == null) { + if (parserContexts.peek() == ParserContext.MAP) { + return FIELD_END_STATE; + } + return ARRAY_ELEMENT_END_STATE; + } + + if (nodesStack.isEmpty()) { + return DOC_END_STATE; + } + + if (expectedTypes.peek().isReadOnly()) { + currentJsonNode = CloneReadOnly.cloneReadOnly(currentJsonNode); + } + + Object parentNode = nodesStack.pop(); + Type parentNodeType = TypeUtils.getType(parentNode); + int parentNodeTypeTag = TypeUtils.getReferredType(parentNodeType).getTag(); + if (parentNodeTypeTag == TypeTags.RECORD_TYPE_TAG || parentNodeTypeTag == TypeTags.MAP_TAG) { + ((BMap) parentNode).put(StringUtils.fromString(fieldNameHierarchy.peek().pop()), + currentJsonNode); + currentJsonNode = parentNode; + return FIELD_END_STATE; + } + + switch (TypeUtils.getType(parentNode).getTag()) { + case TypeTags.ARRAY_TAG -> { + // Handle projection in array. + ArrayType arrayType = (ArrayType) parentNodeType; + if (arrayType.getState() == ArrayType.ArrayState.CLOSED && + arrayType.getSize() <= arrayIndexes.peek()) { + break; + } + ((BArray) parentNode).add(arrayIndexes.peek(), currentJsonNode); + } + case TypeTags.TUPLE_TAG -> ((BArray) parentNode).add(arrayIndexes.peek(), currentJsonNode); + default -> { + } + } + + currentJsonNode = parentNode; + return ARRAY_ELEMENT_END_STATE; + } + + private void updateIndexOfArrayElement() { + int arrayIndex = arrayIndexes.pop(); + arrayIndexes.push(arrayIndex + 1); + } + + public void updateExpectedType(Map fields, Type restType) { + this.fieldHierarchy.push(new HashMap<>(fields)); + this.visitedFieldHierarchy.push(new HashMap<>()); + this.restType.push(restType); + this.fieldNameHierarchy.push(new Stack<>()); + } + + private void updateNextArrayValue() { + arrayIndexes.push(0); + Optional nextArray = JsonCreator.initNewArrayValue(this); + nextArray.ifPresent(array -> currentJsonNode = array); + } + + private State finalizeArrayObject() { + arrayIndexes.pop(); + State state = finalizeObject(); + expectedTypes.pop(); + return state; + } + + public enum ParserContext { + MAP, + ARRAY + } + + /** + * A specific state in the JSON parsing state machine. + */ + interface State { + + /** + * Input given to the current state for a transition. + * + * @param sm the state machine + * @param buff the input characters for the current state + * @param i the location from the character should be read from + * @param count the number of characters to read from the buffer + * @return the new resulting state + */ + State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException; + } + + /** + * Represents the JSON document start state. + */ + private static class DocumentStartState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + char ch; + State state = null; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (ch == '{') { + sm.currentJsonNode = JsonCreator.initRootMapValue(sm.expectedTypes.peek()); + sm.parserContexts.push(JsonParser.StateMachine.ParserContext.MAP); + state = FIRST_FIELD_READY_STATE; + } else if (ch == '[') { + sm.parserContexts.push(JsonParser.StateMachine.ParserContext.ARRAY); + Type expType = sm.expectedTypes.peek(); + // In this point we know rhs is json[] or anydata[] hence init index counter. 
+ if (expType.getTag() == TypeTags.JSON_TAG || expType.getTag() == TypeTags.ANYDATA_TAG) { + sm.arrayIndexes.push(0); + } + sm.currentJsonNode = JsonCreator.initArrayValue(sm.expectedTypes.peek()); + state = FIRST_ARRAY_ELEMENT_READY_STATE; + } else if (StateMachine.isWhitespace(ch)) { + state = this; + continue; + } else if (ch == QUOTES) { + sm.currentQuoteChar = ch; + state = STRING_VALUE_STATE; + } else if (ch == EOF) { + throw new JsonParserException("empty JSON document"); + } else { + state = NON_STRING_VALUE_STATE; + } + break; + } + if (state == NON_STRING_VALUE_STATE) { + sm.index = i; + } else { + sm.index = i + 1; + } + return state; + } + } + + /** + * Represents the JSON document end state. + */ + private static class DocumentEndState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + char ch; + State state = null; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (StateMachine.isWhitespace(ch) || ch == EOF) { + state = this; + continue; + } + throw new JsonParserException("JSON document has already ended"); + } + sm.index = i + 1; + return state; + } + } + + /** + * Represents the state just before the first object field is defined. + */ + private static class FirstFieldReadyState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + char ch; + State state = null; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (ch == QUOTES) { + state = FIELD_NAME_STATE; + sm.currentQuoteChar = ch; + } else if (StateMachine.isWhitespace(ch)) { + state = this; + continue; + } else if (ch == '}') { + state = sm.finalizeNonArrayObjectAndRemoveExpectedType(); + } else { + StateMachine.throwExpected("\"", "}"); + } + break; + } + sm.index = i + 1; + return state; + } + } + + /** + * Represents the state just before the first array element is defined. + */ + private static class FirstArrayElementReadyState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (StateMachine.isWhitespace(ch)) { + state = this; + continue; + } else if (ch == QUOTES) { + state = STRING_ARRAY_ELEMENT_STATE; + sm.currentQuoteChar = ch; + sm.expectedTypes.push(JsonCreator.getMemberType(sm.expectedTypes.peek(), + sm.arrayIndexes.peek(), sm.allowDataProjection)); + } else if (ch == '{') { + // Get member type of the array and set as expected type. + sm.expectedTypes.push(JsonCreator.getMemberType(sm.expectedTypes.peek(), + sm.arrayIndexes.peek(), sm.allowDataProjection)); + JsonCreator.updateNextMapValue(sm); + state = FIRST_FIELD_READY_STATE; + } else if (ch == '[') { + // Get member type of the array and set as expected type. 
+ sm.expectedTypes.push(JsonCreator.getMemberType(sm.expectedTypes.peek(), + sm.arrayIndexes.peek(), sm.allowDataProjection)); + sm.updateNextArrayValue(); + state = FIRST_ARRAY_ELEMENT_READY_STATE; + } else if (ch == ']') { + state = sm.finalizeArrayObject(); + } else { + state = NON_STRING_ARRAY_ELEMENT_STATE; + sm.expectedTypes.push(JsonCreator.getMemberType(sm.expectedTypes.peek(), + sm.arrayIndexes.peek(), sm.allowDataProjection)); + } + break; + } + if (state == NON_STRING_ARRAY_ELEMENT_STATE) { + sm.index = i; + } else { + sm.index = i + 1; + } + return state; + } + } + + /** + * Represents the state just before a non-first object field is defined. + */ + private static class NonFirstFieldReadyState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (ch == QUOTES) { + sm.currentQuoteChar = ch; + state = FIELD_NAME_STATE; + } else if (StateMachine.isWhitespace(ch)) { + state = this; + continue; + } else { + StateMachine.throwExpected("\""); + } + break; + } + sm.index = i + 1; + return state; + } + } + + /** + * Represents the state just before a non-first array element is defined. + */ + private static class NonFirstArrayElementReadyState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (StateMachine.isWhitespace(ch)) { + state = this; + continue; + } else if (ch == QUOTES) { + state = STRING_ARRAY_ELEMENT_STATE; + sm.currentQuoteChar = ch; + sm.expectedTypes.push(JsonCreator.getMemberType(sm.expectedTypes.peek(), + sm.arrayIndexes.peek(), sm.allowDataProjection)); + } else if (ch == '{') { + sm.expectedTypes.push(JsonCreator.getMemberType(sm.expectedTypes.peek(), + sm.arrayIndexes.peek(), sm.allowDataProjection)); + JsonCreator.updateNextMapValue(sm); + state = FIRST_FIELD_READY_STATE; + } else if (ch == '[') { + sm.expectedTypes.push(JsonCreator.getMemberType(sm.expectedTypes.peek(), + sm.arrayIndexes.peek(), sm.allowDataProjection)); + sm.updateNextArrayValue(); + state = FIRST_ARRAY_ELEMENT_READY_STATE; + } else { + sm.expectedTypes.push(JsonCreator.getMemberType(sm.expectedTypes.peek(), + sm.arrayIndexes.peek(), sm.allowDataProjection)); + state = NON_STRING_ARRAY_ELEMENT_STATE; + } + break; + } + if (state == NON_STRING_ARRAY_ELEMENT_STATE) { + sm.index = i; + } else { + sm.index = i + 1; + } + return state; + } + } + + private String value() { + String result = new String(this.charBuff, 0, this.charBuffIndex); + this.charBuffIndex = 0; + return result; + } + + private String processFieldName() { + return this.value(); + } + + /** + * Represents the state during a field name. 
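+     * Characters of the key are buffered until the matching closing quote. At the top level
+     * (jsonFieldDepth == 0) the name is matched against the expected record's fields: a match pushes the
+     * field's declared type as the next expected type, an unmatched name falls back to the rest type, and
+     * with data projection disabled an unknown name with no rest type raises UNDEFINED_FIELD.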
+ */ + private static class FieldNameState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + char ch; + State state = null; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (ch == sm.currentQuoteChar) { + String jsonFieldName = sm.processFieldName(); + if (sm.jsonFieldDepth == 0) { + Field currentField = sm.visitedFieldHierarchy.peek().get(jsonFieldName); + if (currentField == null) { + currentField = sm.fieldHierarchy.peek().remove(jsonFieldName); + } + sm.currentField = currentField; + + Type fieldType; + if (currentField == null) { + fieldType = sm.restType.peek(); + } else { + // Replace modified field name with actual field name. + jsonFieldName = currentField.getFieldName(); + fieldType = currentField.getFieldType(); + sm.visitedFieldHierarchy.peek().put(jsonFieldName, currentField); + } + sm.expectedTypes.push(fieldType); + + if (!sm.allowDataProjection && fieldType == null) { + throw DiagnosticLog.error(DiagnosticErrorCode.UNDEFINED_FIELD, jsonFieldName); + } + } else if (sm.expectedTypes.peek() == null) { + sm.expectedTypes.push(null); + } + sm.fieldNameHierarchy.peek().push(jsonFieldName); + state = END_FIELD_NAME_STATE; + } else if (ch == REV_SOL) { + state = FIELD_NAME_ESC_CHAR_PROCESSING_STATE; + } else if (ch == EOF) { + throw new JsonParserException("unexpected end of JSON document"); + } else { + sm.append(ch); + state = this; + continue; + } + break; + } + sm.index = i + 1; + return state; + } + } + + /** + * Represents the state where a field name definition has ended. + */ + private static class EndFieldNameState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (StateMachine.isWhitespace(ch)) { + state = this; + continue; + } else if (ch == ':') { + state = FIELD_VALUE_READY_STATE; + } else { + StateMachine.throwExpected(":"); + } + break; + } + sm.index = i + 1; + return state; + } + } + + /** + * Represents the state where a field value is about to be defined. + */ + private static class FieldValueReadyState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (StateMachine.isWhitespace(ch)) { + state = this; + continue; + } else if (ch == QUOTES) { + state = STRING_FIELD_VALUE_STATE; + sm.currentQuoteChar = ch; + } else if (ch == '{') { + JsonCreator.updateNextMapValue(sm); + state = FIRST_FIELD_READY_STATE; + } else if (ch == '[') { + sm.arrayIndexes.push(0); + Optional nextArray = JsonCreator.initNewArrayValue(sm); + if (nextArray.isPresent()) { + sm.currentJsonNode = nextArray.get(); + } + state = FIRST_ARRAY_ELEMENT_READY_STATE; + } else { + state = NON_STRING_FIELD_VALUE_STATE; + } + break; + } + if (state == NON_STRING_FIELD_VALUE_STATE) { + sm.index = i; + } else { + sm.index = i + 1; + } + return state; + } + } + + /** + * Represents the state during a string field value is defined. 
+ */ + private static class StringFieldValueState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (ch == sm.currentQuoteChar) { + String s = sm.value(); + Type expType = sm.expectedTypes.pop(); + if (expType == null) { + state = FIELD_END_STATE; + break; + } + + if (sm.jsonFieldDepth > 0) { + sm.currentJsonNode = JsonCreator.convertAndUpdateCurrentJsonNode(sm, + StringUtils.fromString(s), expType); + } else if (sm.currentField != null || sm.restType.peek() != null) { + sm.currentJsonNode = JsonCreator.convertAndUpdateCurrentJsonNode(sm, + StringUtils.fromString(s), expType); + } + state = FIELD_END_STATE; + } else if (ch == REV_SOL) { + state = STRING_FIELD_ESC_CHAR_PROCESSING_STATE; + } else if (ch == EOF) { + throw new JsonParserException("unexpected end of JSON document"); + } else { + sm.append(ch); + state = this; + continue; + } + break; + } + sm.index = i + 1; + return state; + } + } + + /** + * Represents the state during a string array element is defined. + */ + private static class StringArrayElementState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (ch == sm.currentQuoteChar) { + sm.processValue(); + state = ARRAY_ELEMENT_END_STATE; + } else if (ch == REV_SOL) { + state = STRING_AE_ESC_CHAR_PROCESSING_STATE; + } else if (ch == EOF) { + throw new JsonParserException("unexpected end of JSON document"); + } else { + sm.append(ch); + state = this; + continue; + } + break; + } + sm.index = i + 1; + return state; + } + } + + /** + * Represents the state during a non-string field value is defined. + */ + private static class NonStringFieldValueState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (ch == '{') { + JsonCreator.updateNextMapValue(sm); + state = FIRST_FIELD_READY_STATE; + } else if (ch == '[') { + state = FIRST_ARRAY_ELEMENT_READY_STATE; + sm.updateNextArrayValue(); + } else if (ch == '}') { + sm.processValue(); + state = sm.finalizeNonArrayObjectAndRemoveExpectedType(); + } else if (ch == ']') { + sm.processValue(); + state = sm.finalizeArrayObject(); + } else if (ch == ',') { + sm.processValue(); + state = NON_FIRST_FIELD_READY_STATE; + } else if (StateMachine.isWhitespace(ch)) { + sm.processValue(); + state = FIELD_END_STATE; + } else if (ch == EOF) { + throw new JsonParserException("unexpected end of JSON document"); + } else { + sm.append(ch); + state = this; + continue; + } + break; + } + sm.index = i + 1; + return state; + } + } + + /** + * Represents the state during a non-string array element is defined. 
+ */ + private static class NonStringArrayElementState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (ch == '{') { + JsonCreator.updateNextMapValue(sm); + state = FIRST_FIELD_READY_STATE; + } else if (ch == '[') { + state = FIRST_ARRAY_ELEMENT_READY_STATE; + sm.updateNextArrayValue(); + } else if (ch == ']') { + sm.processValue(); + state = sm.finalizeArrayObject(); + } else if (ch == ',') { + sm.processValue(); + state = NON_FIRST_ARRAY_ELEMENT_READY_STATE; + sm.updateIndexOfArrayElement(); + } else if (StateMachine.isWhitespace(ch)) { + sm.processValue(); + state = ARRAY_ELEMENT_END_STATE; + } else if (ch == EOF) { + throw new JsonParserException("unexpected end of JSON document"); + } else { + sm.append(ch); + state = this; + continue; + } + break; + } + sm.index = i + 1; + return state; + } + } + + /** + * Represents the state during a string value is defined. + */ + private static class StringValueState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (ch == sm.currentQuoteChar) { + sm.currentJsonNode = JsonCreator.convertAndUpdateCurrentJsonNode(sm, + StringUtils.fromString(sm.value()), sm.expectedTypes.peek()); + state = DOC_END_STATE; + } else if (ch == REV_SOL) { + state = STRING_VAL_ESC_CHAR_PROCESSING_STATE; + } else if (ch == EOF) { + throw new JsonParserException("unexpected end of JSON document"); + } else { + sm.append(ch); + state = this; + continue; + } + break; + } + sm.index = i + 1; + return state; + } + } + + private void processValue() { + Type expType = expectedTypes.pop(); + BString value = StringUtils.fromString(value()); + if (expType == null) { + return; + } + currentJsonNode = JsonCreator.convertAndUpdateCurrentJsonNode(this, value, expType); + } + + /** + * Represents the state during a non-string value is defined. + */ + private static class NonStringValueState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (StateMachine.isWhitespace(ch) || ch == EOF) { + sm.currentJsonNode = null; + sm.processValue(); + state = DOC_END_STATE; + } else { + sm.append(ch); + state = this; + continue; + } + break; + } + sm.index = i + 1; + return state; + } + } + + /** + * Represents the state where an object field has ended. + */ + private static class FieldEndState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (StateMachine.isWhitespace(ch)) { + state = this; + continue; + } else if (ch == ',') { + state = NON_FIRST_FIELD_READY_STATE; + } else if (ch == '}') { + state = sm.finalizeNonArrayObjectAndRemoveExpectedType(); + } else { + StateMachine.throwExpected(",", "}"); + } + break; + } + sm.index = i + 1; + return state; + } + + } + + /** + * Represents the state where an array element has ended. 
+ */ + private static class ArrayElementEndState implements State { + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if (StateMachine.isWhitespace(ch)) { + state = this; + continue; + } else if (ch == ',') { + state = NON_FIRST_ARRAY_ELEMENT_READY_STATE; + sm.updateIndexOfArrayElement(); + } else if (ch == ']') { + state = sm.finalizeArrayObject(); + } else { + StateMachine.throwExpected(",", "]"); + } + break; + } + sm.index = i + 1; + return state; + } + + } + + /** + * Represents the state where an escaped unicode character in hex format is processed + * from a object string field. + */ + private static class StringFieldUnicodeHexProcessingState extends UnicodeHexProcessingState { + + @Override + protected State getSourceState() { + return STRING_FIELD_VALUE_STATE; + } + + } + + /** + * Represents the state where an escaped unicode character in hex format is processed + * from an array string field. + */ + private static class StringAEProcessingState extends UnicodeHexProcessingState { + + @Override + protected State getSourceState() { + return STRING_ARRAY_ELEMENT_STATE; + } + + } + + /** + * Represents the state where an escaped unicode character in hex format is processed + * from a string value. + */ + private static class StringValueUnicodeHexProcessingState extends UnicodeHexProcessingState { + + @Override + protected State getSourceState() { + return STRING_VALUE_STATE; + } + + } + + /** + * Represents the state where an escaped unicode character in hex format is processed + * from a field name. + */ + private static class FieldNameUnicodeHexProcessingState extends UnicodeHexProcessingState { + + @Override + protected State getSourceState() { + return FIELD_NAME_STATE; + } + + } + + /** + * Represents the state where an escaped unicode character in hex format is processed. + */ + private abstract static class UnicodeHexProcessingState implements State { + + protected abstract State getSourceState(); + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + State state = null; + char ch; + for (; i < count; i++) { + ch = buff[i]; + sm.processLocation(ch); + if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'F') || (ch >= 'a' && ch <= 'f')) { + sm.hexBuilder.append(ch); + if (sm.hexBuilder.length() >= 4) { + sm.append(this.extractUnicodeChar(sm)); + this.reset(sm); + state = this.getSourceState(); + break; + } + state = this; + continue; + } + this.reset(sm); + StateMachine.throwExpected("hexadecimal value of an unicode character"); + break; + } + sm.index = i + 1; + return state; + } + + private void reset(StateMachine sm) { + sm.hexBuilder.setLength(0); + } + + private char extractUnicodeChar(StateMachine sm) { + return StringEscapeUtils.unescapeJava("\\u" + sm.hexBuilder.toString()).charAt(0); + } + + } + + /** + * Represents the state where an escaped character is processed in a object string field. + */ + private static class StringFieldEscapedCharacterProcessingState extends EscapedCharacterProcessingState { + + @Override + protected State getSourceState() { + return STRING_FIELD_VALUE_STATE; + } + + } + + /** + * Represents the state where an escaped character is processed in an array string field. 
+ */ + private static class StringAEEscapedCharacterProcessingState extends EscapedCharacterProcessingState { + + @Override + protected State getSourceState() { + return STRING_ARRAY_ELEMENT_STATE; + } + + } + + /** + * Represents the state where an escaped character is processed in a string value. + */ + private static class StringValueEscapedCharacterProcessingState extends EscapedCharacterProcessingState { + + @Override + protected State getSourceState() { + return STRING_VALUE_STATE; + } + + } + + /** + * Represents the state where an escaped character is processed in a field name. + */ + private static class FieldNameEscapedCharacterProcessingState extends EscapedCharacterProcessingState { + + @Override + protected State getSourceState() { + return FIELD_NAME_STATE; + } + + } + + /** + * Represents the state where an escaped character is processed. + */ + private abstract static class EscapedCharacterProcessingState implements State { + + protected abstract State getSourceState(); + + @Override + public State transition(StateMachine sm, char[] buff, int i, int count) throws JsonParserException { + State state = null; + char ch; + if (i < count) { + ch = buff[i]; + sm.processLocation(ch); + switch (ch) { + case '"': + sm.append(QUOTES); + state = this.getSourceState(); + break; + case '\\': + sm.append(REV_SOL); + state = this.getSourceState(); + break; + case '/': + sm.append(SOL); + state = this.getSourceState(); + break; + case 'b': + sm.append(BACKSPACE); + state = this.getSourceState(); + break; + case 'f': + sm.append(FORMFEED); + state = this.getSourceState(); + break; + case 'n': + sm.append(NEWLINE); + state = this.getSourceState(); + break; + case 'r': + sm.append(CR); + state = this.getSourceState(); + break; + case 't': + sm.append(HZ_TAB); + state = this.getSourceState(); + break; + case 'u': + if (this.getSourceState() == STRING_FIELD_VALUE_STATE) { + state = STRING_FIELD_UNICODE_HEX_PROCESSING_STATE; + } else if (this.getSourceState() == STRING_VALUE_STATE) { + state = STRING_VALUE_UNICODE_HEX_PROCESSING_STATE; + } else if (this.getSourceState() == FIELD_NAME_STATE) { + state = FIELD_NAME_UNICODE_HEX_PROCESSING_STATE; + } else if (this.getSourceState() == STRING_ARRAY_ELEMENT_STATE) { + state = STRING_AE_PROCESSING_STATE; + } else { + throw new JsonParserException("unknown source '" + this.getSourceState() + + "' in escape char processing state"); + } + break; + default: + StateMachine.throwExpected("escaped characters"); + } + } + sm.index = i + 1; + return state; + } + + } + } +} diff --git a/native/src/main/java/io/ballerina/lib/data/jsondata/json/JsonTraverse.java b/native/src/main/java/io/ballerina/lib/data/jsondata/json/JsonTraverse.java new file mode 100644 index 0000000..6258c9a --- /dev/null +++ b/native/src/main/java/io/ballerina/lib/data/jsondata/json/JsonTraverse.java @@ -0,0 +1,300 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.ballerina.lib.data.jsondata.json; + +import io.ballerina.lib.data.jsondata.utils.Constants; +import io.ballerina.lib.data.jsondata.utils.DiagnosticErrorCode; +import io.ballerina.lib.data.jsondata.utils.DiagnosticLog; +import io.ballerina.runtime.api.PredefinedTypes; +import io.ballerina.runtime.api.TypeTags; +import io.ballerina.runtime.api.creators.ValueCreator; +import io.ballerina.runtime.api.flags.SymbolFlags; +import io.ballerina.runtime.api.types.ArrayType; +import io.ballerina.runtime.api.types.Field; +import io.ballerina.runtime.api.types.IntersectionType; +import io.ballerina.runtime.api.types.MapType; +import io.ballerina.runtime.api.types.RecordType; +import io.ballerina.runtime.api.types.TupleType; +import io.ballerina.runtime.api.types.Type; +import io.ballerina.runtime.api.types.UnionType; +import io.ballerina.runtime.api.utils.StringUtils; +import io.ballerina.runtime.api.utils.TypeUtils; +import io.ballerina.runtime.api.utils.ValueUtils; +import io.ballerina.runtime.api.values.BArray; +import io.ballerina.runtime.api.values.BError; +import io.ballerina.runtime.api.values.BMap; +import io.ballerina.runtime.api.values.BString; + +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Stack; + +/** + * Traverse json tree. + * + * @since 0.1.0 + */ +public class JsonTraverse { + + private static final ThreadLocal tlJsonTree = ThreadLocal.withInitial(JsonTree::new); + + public static Object traverse(Object json, BMap options, Type type) { + JsonTree jsonTree = tlJsonTree.get(); + try { + jsonTree.allowDataProjection = (boolean) options.get(Constants.ALLOW_DATA_PROJECTION); + return jsonTree.traverseJson(json, type); + } catch (BError e) { + return e; + } finally { + jsonTree.reset(); + } + } + + static class JsonTree { + Field currentField; + Stack> fieldHierarchy = new Stack<>(); + Stack restType = new Stack<>(); + Deque fieldNames = new ArrayDeque<>(); + Type rootArray; + boolean allowDataProjection; + + void reset() { + currentField = null; + fieldHierarchy.clear(); + restType.clear(); + fieldNames.clear(); + rootArray = null; + } + + public Object traverseJson(Object json, Type type) { + Type referredType = TypeUtils.getReferredType(type); + switch (referredType.getTag()) { + case TypeTags.RECORD_TYPE_TAG -> { + RecordType recordType = (RecordType) referredType; + fieldHierarchy.push(JsonCreator.getAllFieldsInRecord(recordType)); + restType.push(recordType.getRestFieldType()); + return traverseMapJsonOrArrayJson(json, + ValueCreator.createRecordValue(type.getPackage(), type.getName()), referredType); + } + case TypeTags.ARRAY_TAG -> { + rootArray = referredType; + return traverseMapJsonOrArrayJson(json, ValueCreator.createArrayValue((ArrayType) referredType), + referredType); + } + case TypeTags.TUPLE_TAG -> { + rootArray = referredType; + return traverseMapJsonOrArrayJson(json, ValueCreator.createTupleValue((TupleType) referredType), + referredType); + } + case TypeTags.NULL_TAG, TypeTags.BOOLEAN_TAG, TypeTags.INT_TAG, TypeTags.FLOAT_TAG, + TypeTags.DECIMAL_TAG, TypeTags.STRING_TAG, TypeTags.CHAR_STRING_TAG , TypeTags.BYTE_TAG, + TypeTags.SIGNED8_INT_TAG, TypeTags.SIGNED16_INT_TAG, TypeTags.SIGNED32_INT_TAG, + TypeTags.UNSIGNED8_INT_TAG, TypeTags.UNSIGNED16_INT_TAG, TypeTags.UNSIGNED32_INT_TAG, + TypeTags.FINITE_TYPE_TAG -> { + return convertToBasicType(json, referredType); + } + case TypeTags.UNION_TAG -> { + for (Type memberType : ((UnionType) 
referredType).getMemberTypes()) { + try { + return traverseJson(json, memberType); + } catch (Exception e) { + // Ignore + } + } + throw DiagnosticLog.error(DiagnosticErrorCode.INVALID_TYPE, type, PredefinedTypes.TYPE_ANYDATA); + } + case TypeTags.JSON_TAG, TypeTags.ANYDATA_TAG -> { + return json; + } + case TypeTags.MAP_TAG -> { + MapType mapType = (MapType) referredType; + fieldHierarchy.push(new HashMap<>()); + restType.push(mapType.getConstrainedType()); + return traverseMapJsonOrArrayJson(json, ValueCreator.createMapValue(mapType), referredType); + } + case TypeTags.INTERSECTION_TAG -> { + Type effectiveType = ((IntersectionType) referredType).getEffectiveType(); + if (!SymbolFlags.isFlagOn(SymbolFlags.READONLY, effectiveType.getFlags())) { + throw DiagnosticLog.error(DiagnosticErrorCode.UNSUPPORTED_TYPE, type); + } + for (Type constituentType : ((IntersectionType) referredType).getConstituentTypes()) { + if (constituentType.getTag() == TypeTags.READONLY_TAG) { + continue; + } + return JsonCreator.constructReadOnlyValue(traverseJson(json, constituentType)); + } + throw DiagnosticLog.error(DiagnosticErrorCode.UNSUPPORTED_TYPE, type); + } + default -> + throw DiagnosticLog.error(DiagnosticErrorCode.INVALID_TYPE, type, PredefinedTypes.TYPE_ANYDATA); + } + } + + private Object traverseMapJsonOrArrayJson(Object json, Object currentJsonNode, Type type) { + if (json instanceof BMap bMap) { + return traverseMapValue(bMap, currentJsonNode); + } else if (json instanceof BArray bArray) { + return traverseArrayValue(bArray, currentJsonNode); + } else { + // JSON value not compatible with map or array. + if (type.getTag() == TypeTags.RECORD_TYPE_TAG) { + this.fieldHierarchy.pop(); + this.restType.pop(); + } + + if (fieldNames.isEmpty()) { + throw DiagnosticLog.error(DiagnosticErrorCode.INCOMPATIBLE_TYPE, type, json); + } + throw DiagnosticLog.error(DiagnosticErrorCode.INVALID_TYPE_FOR_FIELD, getCurrentFieldPath()); + } + } + + private Object traverseMapValue(BMap map, Object currentJsonNode) { + for (BString key : map.getKeys()) { + currentField = fieldHierarchy.peek().remove(key.toString()); + if (currentField == null) { + // Add to the rest field + if (restType.peek() != null) { + Type restFieldType = TypeUtils.getReferredType(restType.peek()); + addRestField(restFieldType, key, map.get(key), currentJsonNode); + } + if (allowDataProjection) { + continue; + } + throw DiagnosticLog.error(DiagnosticErrorCode.UNDEFINED_FIELD, key); + } + + String fieldName = currentField.getFieldName(); + fieldNames.push(fieldName); + Type currentFieldType = TypeUtils.getReferredType(currentField.getFieldType()); + int currentFieldTypeTag = currentFieldType.getTag(); + Object mapValue = map.get(key); + + switch (currentFieldTypeTag) { + case TypeTags.NULL_TAG, TypeTags.BOOLEAN_TAG, TypeTags.INT_TAG, TypeTags.FLOAT_TAG, + TypeTags.DECIMAL_TAG, TypeTags.STRING_TAG -> { + Object value = convertToBasicType(mapValue, currentFieldType); + ((BMap) currentJsonNode).put(StringUtils.fromString(fieldNames.pop()), value); + } + default -> + ((BMap) currentJsonNode).put(StringUtils.fromString(fieldName), + traverseJson(mapValue, currentFieldType)); + } + } + Map currentField = fieldHierarchy.pop(); + checkOptionalFieldsAndLogError(currentField); + restType.pop(); + return currentJsonNode; + } + + private Object traverseArrayValue(BArray array, Object currentJsonNode) { + switch (rootArray.getTag()) { + case TypeTags.ARRAY_TAG -> { + ArrayType arrayType = (ArrayType) rootArray; + int expectedArraySize = arrayType.getSize(); + 
long sourceArraySize = array.getLength(); + if (!allowDataProjection && expectedArraySize < sourceArraySize) { + throw DiagnosticLog.error(DiagnosticErrorCode.ARRAY_SIZE_MISMATCH); + } + + Type elementType = arrayType.getElementType(); + if (expectedArraySize == -1 || expectedArraySize > sourceArraySize) { + traverseArrayMembers(array.getLength(), array, elementType, currentJsonNode); + } else { + traverseArrayMembers(expectedArraySize, array, elementType, currentJsonNode); + } + } + case TypeTags.TUPLE_TAG -> { + TupleType tupleType = (TupleType) rootArray; + Type restType = tupleType.getRestType(); + int expectedTupleTypeCount = tupleType.getTupleTypes().size(); + for (int i = 0; i < array.getLength(); i++) { + Object jsonMember = array.get(i); + Object nextJsonNode; + if (i < expectedTupleTypeCount) { + nextJsonNode = traverseJson(jsonMember, tupleType.getTupleTypes().get(i)); + } else if (restType != null) { + nextJsonNode = traverseJson(jsonMember, restType); + } else if (!allowDataProjection) { + throw DiagnosticLog.error(DiagnosticErrorCode.ARRAY_SIZE_MISMATCH); + } else { + continue; + } + ((BArray) currentJsonNode).add(i, nextJsonNode); + } + } + } + return currentJsonNode; + } + + private void traverseArrayMembers(long length, BArray array, Type elementType, Object currentJsonNode) { + for (int i = 0; i < length; i++) { + ((BArray) currentJsonNode).add(i, traverseJson(array.get(i), elementType)); + } + } + + private void addRestField(Type restFieldType, BString key, Object jsonMember, Object currentJsonNode) { + Object nextJsonValue; + switch (restFieldType.getTag()) { + case TypeTags.ANYDATA_TAG, TypeTags.JSON_TAG -> + ((BMap) currentJsonNode).put(key, jsonMember); + case TypeTags.BOOLEAN_TAG, TypeTags.INT_TAG, TypeTags.FLOAT_TAG, TypeTags.DECIMAL_TAG, + TypeTags.STRING_TAG -> { + ((BMap) currentJsonNode).put(key, convertToBasicType(jsonMember, restFieldType)); + } + default -> { + nextJsonValue = traverseJson(jsonMember, restFieldType); + ((BMap) currentJsonNode).put(key, nextJsonValue); + } + } + } + + private void checkOptionalFieldsAndLogError(Map currentField) { + currentField.values().forEach(field -> { + if (SymbolFlags.isFlagOn(field.getFlags(), SymbolFlags.REQUIRED)) { + throw DiagnosticLog.error(DiagnosticErrorCode.REQUIRED_FIELD_NOT_PRESENT, field.getFieldName()); + } + }); + } + + private Object convertToBasicType(Object json, Type targetType) { + try { + return ValueUtils.convert(json, targetType); + } catch (BError e) { + if (fieldNames.isEmpty()) { + throw DiagnosticLog.error(DiagnosticErrorCode.INCOMPATIBLE_TYPE, targetType, json.toString()); + } + throw DiagnosticLog.error(DiagnosticErrorCode.INCOMPATIBLE_VALUE_FOR_FIELD, json.toString(), targetType, + getCurrentFieldPath()); + } + } + + private String getCurrentFieldPath() { + Iterator itr = fieldNames.descendingIterator(); + StringBuilder sb = new StringBuilder(itr.hasNext() ? itr.next() : ""); + while (itr.hasNext()) { + sb.append(".").append(itr.next()); + } + return sb.toString(); + } + } +} diff --git a/native/src/main/java/io/ballerina/lib/data/jsondata/json/Native.java b/native/src/main/java/io/ballerina/lib/data/jsondata/json/Native.java new file mode 100644 index 0000000..23a9d3d --- /dev/null +++ b/native/src/main/java/io/ballerina/lib/data/jsondata/json/Native.java @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. 
licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.ballerina.lib.data.jsondata.json; + +import io.ballerina.lib.data.jsondata.io.DataReaderTask; +import io.ballerina.lib.data.jsondata.io.DataReaderThreadPool; +import io.ballerina.runtime.api.Environment; +import io.ballerina.runtime.api.Future; +import io.ballerina.runtime.api.utils.JsonUtils; +import io.ballerina.runtime.api.values.BArray; +import io.ballerina.runtime.api.values.BError; +import io.ballerina.runtime.api.values.BMap; +import io.ballerina.runtime.api.values.BObject; +import io.ballerina.runtime.api.values.BStream; +import io.ballerina.runtime.api.values.BString; +import io.ballerina.runtime.api.values.BTypedesc; + +import java.io.ByteArrayInputStream; +import java.io.InputStreamReader; +import java.io.StringReader; + +/** + * Json conversions. + * + * @since 0.1.0 + */ +public class Native { + + public static Object parseAsType(Object json, BMap options, BTypedesc typed) { + try { + return JsonTraverse.traverse(json, options, typed.getDescribingType()); + } catch (BError e) { + return e; + } + } + + public static Object parseString(BString json, BMap options, BTypedesc typed) { + try { + return JsonParser.parse(new StringReader(json.getValue()), options, typed.getDescribingType()); + } catch (BError e) { + return e; + } + } + + public static Object parseBytes(BArray json, BMap options, BTypedesc typed) { + try { + byte[] bytes = json.getBytes(); + return JsonParser.parse(new InputStreamReader(new ByteArrayInputStream(bytes)), options, + typed.getDescribingType()); + } catch (BError e) { + return e; + } + } + + public static Object parseStream(Environment env, BStream json, BMap options, BTypedesc typed) { + final BObject iteratorObj = json.getIteratorObj(); + final Future future = env.markAsync(); + DataReaderTask task = new DataReaderTask(env, iteratorObj, future, typed, options); + DataReaderThreadPool.EXECUTOR_SERVICE.submit(task); + return null; + } + + public static Object toJson(Object value) { + return JsonUtils.convertToJson(value); + } +} diff --git a/native/src/main/java/io/ballerina/lib/data/jsondata/utils/Constants.java b/native/src/main/java/io/ballerina/lib/data/jsondata/utils/Constants.java new file mode 100644 index 0000000..a319049 --- /dev/null +++ b/native/src/main/java/io/ballerina/lib/data/jsondata/utils/Constants.java @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.ballerina.lib.data.jsondata.utils; + +import io.ballerina.runtime.api.PredefinedTypes; +import io.ballerina.runtime.api.creators.TypeCreator; +import io.ballerina.runtime.api.types.MapType; +import io.ballerina.runtime.api.utils.StringUtils; +import io.ballerina.runtime.api.values.BString; + +/** + * Constants for jsondata. + * + * @since 0.1.0 + */ +public class Constants { + public static final MapType JSON_MAP_TYPE = TypeCreator.createMapType(PredefinedTypes.TYPE_JSON); + public static final MapType ANYDATA_MAP_TYPE = TypeCreator.createMapType(PredefinedTypes.TYPE_ANYDATA); + public static final BString VALUE = StringUtils.fromString("value"); + public static final String FIELD = "$field$."; + public static final String FIELD_REGEX = "\\$field\\$\\."; + public static final String NAME = "Name"; + public static final BString ALLOW_DATA_PROJECTION = StringUtils.fromString("allowDataProjection"); +} diff --git a/native/src/main/java/io/ballerina/lib/data/jsondata/utils/DiagnosticErrorCode.java b/native/src/main/java/io/ballerina/lib/data/jsondata/utils/DiagnosticErrorCode.java new file mode 100644 index 0000000..de34ddb --- /dev/null +++ b/native/src/main/java/io/ballerina/lib/data/jsondata/utils/DiagnosticErrorCode.java @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.ballerina.lib.data.jsondata.utils; + +/** + * Represents a diagnostic error code. 
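+ * Each constant pairs a diagnostic id (for example JSON_ERROR_004) with a message key that
+ * {@link DiagnosticLog} resolves against the error.properties resource bundle.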
+ * + * @since 0.1.0 + */ +public enum DiagnosticErrorCode { + + UNSUPPORTED_TYPE("JSON_ERROR_001", "unsupported.type"), + JSON_READER_FAILURE("JSON_ERROR_002", "json.reader.failure"), + JSON_PARSER_EXCEPTION("JSON_ERROR_003", "json.parser.exception"), + INCOMPATIBLE_TYPE("JSON_ERROR_004", "incompatible.type"), + ARRAY_SIZE_MISMATCH("JSON_ERROR_005", "array.size.mismatch"), + INVALID_TYPE("JSON_ERROR_006", "invalid.type"), + INCOMPATIBLE_VALUE_FOR_FIELD("JSON_ERROR_007", "incompatible.value.for.field"), + REQUIRED_FIELD_NOT_PRESENT("JSON_ERROR_008", "required.field.not.present"), + INVALID_TYPE_FOR_FIELD("JSON_ERROR_009", "invalid.type.for.field"), + DUPLICATE_FIELD("JSON_ERROR_010", "duplicate.field"), + CANNOT_CONVERT_TO_EXPECTED_TYPE("JSON_ERROR_011", "cannot.convert.to.expected.type"), + UNDEFINED_FIELD("JSON_ERROR_012", "undefined.field"); + + String diagnosticId; + String messageKey; + + DiagnosticErrorCode(String diagnosticId, String messageKey) { + this.diagnosticId = diagnosticId; + this.messageKey = messageKey; + } + + public String messageKey() { + return messageKey; + } +} diff --git a/native/src/main/java/io/ballerina/lib/data/jsondata/utils/DiagnosticLog.java b/native/src/main/java/io/ballerina/lib/data/jsondata/utils/DiagnosticLog.java new file mode 100644 index 0000000..437eb17 --- /dev/null +++ b/native/src/main/java/io/ballerina/lib/data/jsondata/utils/DiagnosticLog.java @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.ballerina.lib.data.jsondata.utils; + +import io.ballerina.runtime.api.creators.ErrorCreator; +import io.ballerina.runtime.api.utils.StringUtils; +import io.ballerina.runtime.api.values.BError; + +import java.text.MessageFormat; +import java.util.Locale; +import java.util.ResourceBundle; + +/** + * Diagnostic log for jsondata module. + * + * @since 0.1.0 + */ +public class DiagnosticLog { + private static final String ERROR_PREFIX = "error"; + private static final String ERROR = "Error"; + private static final ResourceBundle MESSAGES = ResourceBundle.getBundle("error", Locale.getDefault()); + + public static BError error(DiagnosticErrorCode code, Object... args) { + String msg = formatMessage(code, args); + return getJsonError(msg); + } + + private static String formatMessage(DiagnosticErrorCode code, Object[] args) { + String msgKey = MESSAGES.getString(ERROR_PREFIX + "." 
+ code.messageKey()); + return MessageFormat.format(msgKey, args); + } + + public static BError getJsonError(String message) { + return ErrorCreator.createError(ModuleUtils.getModule(), ERROR, StringUtils.fromString(message), + null, null); + } +} diff --git a/native/src/main/java/io/ballerina/lib/data/jsondata/utils/ModuleUtils.java b/native/src/main/java/io/ballerina/lib/data/jsondata/utils/ModuleUtils.java new file mode 100644 index 0000000..879316b --- /dev/null +++ b/native/src/main/java/io/ballerina/lib/data/jsondata/utils/ModuleUtils.java @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.ballerina.lib.data.jsondata.utils; + +import io.ballerina.runtime.api.Environment; +import io.ballerina.runtime.api.Module; + +/** + * This class will hold module related utility functions. + * + * @since 0.1.0 + */ +public class ModuleUtils { + + /** + * Time standard library package ID. + */ + private static Module module = null; + + private ModuleUtils() { + } + + public static void setModule(Environment env) { + module = env.getCurrentModule(); + } + + public static Module getModule() { + return module; + } +} diff --git a/native/src/main/java/module-info.java b/native/src/main/java/module-info.java new file mode 100644 index 0000000..18ec6bf --- /dev/null +++ b/native/src/main/java/module-info.java @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com). + * + * WSO2 LLC. licenses this file to you under the Apache License, + * Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+module io.ballerina.stdlib.data {
+    requires io.ballerina.runtime;
+    requires io.ballerina.lang.value;
+    requires junit;
+    requires org.apache.commons.lang3;
+    exports io.ballerina.lib.data.jsondata.json;
+}
diff --git a/native/src/main/resources/META-INF/native-image/io.ballerina.stdlib/data/jsondata-native/resource-config.json b/native/src/main/resources/META-INF/native-image/io.ballerina.stdlib/data/jsondata-native/resource-config.json
new file mode 100644
index 0000000..befe09e
--- /dev/null
+++ b/native/src/main/resources/META-INF/native-image/io.ballerina.stdlib/data/jsondata-native/resource-config.json
@@ -0,0 +1,6 @@
+{
+  "bundles":[{
+    "name":"error",
+    "locales":[""]
+  }]
+}
diff --git a/native/src/main/resources/error.properties b/native/src/main/resources/error.properties
new file mode 100644
index 0000000..3db6b8b
--- /dev/null
+++ b/native/src/main/resources/error.properties
@@ -0,0 +1,57 @@
+#
+# Copyright (c) 2024, WSO2 LLC. (http://www.wso2.com).
+#
+# WSO2 LLC. licenses this file to you under the Apache License,
+# Version 2.0 (the "License"); you may not use this file except
+# in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#

+# -------------------------
+# Json Data module error messages
+# -------------------------
+
+error.unsupported.type=\
+  unsupported type ''{0}''
+
+error.json.reader.failure=\
+  error while reading JSON: ''{0}''
+
+error.json.parser.exception=\
+  ''{0}'' at line: ''{1}'' column: ''{2}''
+
+error.incompatible.type=\
+  incompatible expected type ''{0}'' for value ''{1}''
+
+error.array.size.mismatch=\
+  array size is not compatible with the expected size
+
+error.invalid.type=\
+  invalid type ''{0}'' expected ''{1}''
+
+error.incompatible.value.for.field=\
+  incompatible value ''{0}'' for type ''{1}'' in field ''{2}''
+
+error.required.field.not.present=\
+  required field ''{0}'' not present in JSON
+
+error.invalid.type.for.field=\
+  invalid type for field ''{0}''
+
+error.duplicate.field=\
+  duplicate field ''{0}''
+
+error.cannot.convert.to.expected.type=\
+  ''{0}'' value ''{1}'' cannot be converted to ''{2}''
+
+error.undefined.field=\
+  undefined field ''{0}''
diff --git a/settings.gradle b/settings.gradle
new file mode 100644
index 0000000..3061afb
--- /dev/null
+++ b/settings.gradle
@@ -0,0 +1,41 @@
+/**
+ * Copyright (c) 2024, WSO2 LLC. (https://www.wso2.com).
+ *
+ * WSO2 LLC. licenses this file to you under the Apache License,
+ * Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+plugins {
+    id "com.gradle.enterprise" version "3.2"
+}
+
+rootProject.name = 'data.jsondata'
+include(':checkstyle')
+include(':data.jsondata-native')
+include(':data.jsondata-ballerina')
+include(':data.jsondata-compiler-plugin')
+include(':data.jsondata-compiler-plugin-tests')
+
+project(':checkstyle').projectDir = file("build-config${File.separator}checkstyle")
+project(':data.jsondata-native').projectDir = file('native')
+project(':data.jsondata-ballerina').projectDir = file('ballerina')
+project(':data.jsondata-compiler-plugin').projectDir = file('compiler-plugin')
+project(':data.jsondata-compiler-plugin-tests').projectDir = file('compiler-plugin-test')
+
+gradleEnterprise {
+    buildScan {
+        termsOfServiceUrl = 'https://gradle.com/terms-of-service'
+        termsOfServiceAgree = 'yes'
+    }
+}
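For context, a minimal sketch of how the diagnostic utilities introduced above would typically be invoked from native converter code. The calling class and the type name passed in are hypothetical illustrations, not part of this change, and it assumes ModuleUtils.setModule(env) has already been called during module initialization:

    // Hypothetical caller; shows DiagnosticLog + DiagnosticErrorCode usage only.
    import io.ballerina.lib.data.jsondata.utils.DiagnosticErrorCode;
    import io.ballerina.lib.data.jsondata.utils.DiagnosticLog;
    import io.ballerina.runtime.api.values.BError;

    public class UsageSketch {
        static BError reportUnsupportedType(String typeName) {
            // Resolves the "error.unsupported.type" entry in error.properties and
            // formats it via MessageFormat as: unsupported type 'typeName'
            return DiagnosticLog.error(DiagnosticErrorCode.UNSUPPORTED_TYPE, typeName);
        }
    }

The returned BError carries the formatted message under the module's "Error" type, so callers can hand it straight back to the Ballerina runtime.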