diff --git a/.github/workflows/clippy-rustfmt-fix.yml b/.github/workflows/clippy-rustfmt-fix.yml
index 620863af..fc2cfd76 100644
--- a/.github/workflows/clippy-rustfmt-fix.yml
+++ b/.github/workflows/clippy-rustfmt-fix.yml
@@ -12,7 +12,7 @@ env:
target/
jobs:
- publish:
+ run-and-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -23,15 +23,19 @@ jobs:
- name: Run automated fixes
run: |
- cargo clippy --fix
+ # Run clippy on projects
+ cargo clippy --fix --manifest-path ./parser/Cargo.toml --allow-dirty
+ cargo clippy --fix --manifest-path ./checker/Cargo.toml --allow-dirty
+ cargo clippy --fix --allow-dirty
+
+ # Format
cargo fmt
- - name: Commit
+ - name: Commit changes
run: |
- git add .
- git commit -m "Run clippy --fix & formatting"
-
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
-
+
+ git add .
+ git commit -m "Run clippy --fix & cargo fmt"
git push
diff --git a/.github/workflows/github-release.yml b/.github/workflows/github-release.yml
index 86484f2c..f2fd325a 100644
--- a/.github/workflows/github-release.yml
+++ b/.github/workflows/github-release.yml
@@ -21,7 +21,8 @@ jobs:
runs-on: ubuntu-latest
outputs:
- new-ezno-version: ${{ steps.get-version.outputs.new-ezno-version }}
+ LATEST_EZNO_VERSION: ${{ steps.get-version.outputs.LATEST_EZNO_VERSION }}
+ LATEST_EZNO_VERSION_DASH: ${{ steps.get-version.outputs.LATEST_EZNO_VERSION_DASH }}
SPONSORS: ${{ steps.get-sponsors-and-contributors.outputs.SPONSORS }}
CONTRIBUTORS: ${{ steps.get-sponsors-and-contributors.outputs.CONTRIBUTORS }}
@@ -38,43 +39,32 @@ jobs:
git for-each-ref --sort=creatordate --format '%(refname:short)' 'refs/tags/release/ezno-[0-9]*'
echo "::endgroup::"
- TAG=$(git for-each-ref --sort=creatordate --format '%(refname:short)' 'refs/tags/release/ezno-[0-9]*' | tail -n 1)
- echo "Building GH release for ${TAG:13}"
- echo "new-ezno-version=${TAG:13}" >> $GITHUB_OUTPUT
+ TAG=$(git for-each-ref --sort=creatordate --format '%(refname:short)' 'refs/tags/release/ezno-[0-9]*' | tail -n 1 | cut -c 14-)
- # Replace '.' with '-'
- NAME_VERSION=$(echo $VERSION | sed -e "s/\./-/g")
+ echo "::notice::Releasing with found version $TAG"
+ echo "LATEST_EZNO_VERSION=${TAG}" >> "$GITHUB_OUTPUT"
+ echo "LATEST_EZNO_VERSION_DASH=${TAG//./-}" >> "$GITHUB_OUTPUT"
else
- VERSION="${{ inputs.ezno-version }}"
- echo "Building GH release for ${VERSION}"
- echo "new-ezno-version=${VERSION}" >> $GITHUB_OUTPUT
+ TAG="${{ inputs.ezno-version }}"
- # Replace '.' with '-'
- NAME_VERSION=$(echo $VERSION | sed -e "s/\./-/g")
+ echo "::notice::Releasing with specific version $TAG"
+ echo "LATEST_EZNO_VERSION=${TAG}" >> "$GITHUB_OUTPUT"
+ echo "LATEST_EZNO_VERSION_DASH=${TAG//./-}" >> "$GITHUB_OUTPUT"
fi
- - id: get-sponsors-and-contributors
+ - name: Get sponsors and contributors
+ id: get-sponsors-and-contributors
run: |
- SPONSORS=$(gh api graphql -f query='{
- user(login: "kaleidawave") {
- sponsorshipsAsMaintainer(first: 100, activeOnly: false) {
- edges {
- node {
- sponsor {
- name, login
- }
- }
- }
- }
- }
- }' -q '.data.user.sponsorshipsAsMaintainer.edges | map(.node.sponsor.name // .node.sponsor.login) | join(",")')
-
- CONTRIBUTORS=$(
- gh pr list --state merged --json author | jq 'map(.author.name // .author.login) | unique | join(",")' --raw-output
- )
+ SQP='.data.user.sponsorshipsAsMaintainer.edges | map(.node.sponsor.name // .node.sponsor.login) | join(",")'
+ GQL_SQ='{ user(login: "kaleidawave") { sponsorshipsAsMaintainer(first: 100, activeOnly: false) { edges { node { sponsor { name, login } } } } } }'
+ SPONSORS=$(gh api graphql -f query="$GQL_SQ" -q "$SQP")
+
+ CQP='map(.author.name // .author.login) | unique | join(",")'
+ CONTRIBUTORS=$(gh pr list --state merged --json author | jq "$CQP" --raw-output)
- echo "SPONSORS=$SPONSORS" # >> $GITHUB_OUTPUT
- echo "CONTRIBUTORS=$CONTRIBUTORS"
+ echo "SPONSORS=$SPONSORS" >> "$GITHUB_OUTPUT"
+ echo "CONTRIBUTORS=$CONTRIBUTORS" >> "$GITHUB_OUTPUT"
+ echo "::notice::CONTRIBUTORS=$CONTRIBUTORS and SPONSORS=$SPONSORS"
shell: bash
env:
@@ -88,13 +78,16 @@ jobs:
os: [ubuntu-latest, windows-latest]
include:
- os: windows-latest
- executable-extension: .exe
- platform_name: x86_64-pc-windows
+ platform-name: x86_64-pc-windows
+ executable-extension: ".exe"
- os: ubuntu-latest
- platform_name: x86_64-unknown-linux
+ platform-name: x86_64-unknown-linux
runs-on: ${{ matrix.os }}
+ env:
+ LEVEL: release
+
# Important that everything here works in all the above OSes!
steps:
- uses: actions/checkout@v4
@@ -110,10 +103,10 @@ jobs:
SPONSORS: ${{ needs.get-build-info.outputs.SPONSORS }}
CONTRIBUTORS: ${{ needs.get-build-info.outputs.CONTRIBUTORS }}
- - name: Rename and move release assets
+ - name: Rename and move ${{ env.LEVEL }} assets
run: |
mkdir artifacts
- mv target/release/ezno${{ matrix.executable-extension }} "artifacts/ezno-${{ needs.get-build-info.outputs.new-ezno-version }}-${{ matrix.platform_name }}${{ matrix.executable-extension }}"
+ mv "target/${{ env.LEVEL }}/ezno${{ matrix.executable-extension }}" "artifacts/ezno-${{ needs.get-build-info.outputs.LATEST_EZNO_VERSION_DASH }}-${{ matrix.platform-name }}${{ matrix.executable-extension }}"
- uses: actions/upload-artifact@v4
with:
@@ -144,8 +137,8 @@ jobs:
- name: GitHub release
uses: softprops/action-gh-release@v1
with:
- name: "Ezno ${{ needs.get-build-info.outputs.new-ezno-version }}"
- tag_name: "release/ezno-${{ needs.get-build-info.outputs.new-ezno-version }}"
+ name: "Ezno ${{ needs.get-build-info.outputs.LATEST_EZNO_VERSION }}"
+ tag_name: "release/ezno-${{ needs.get-build-info.outputs.LATEST_EZNO_VERSION }}"
body: "For @kaleidawave to update"
files: |
README.md
diff --git a/.github/workflows/performance-and-size.yml b/.github/workflows/performance-and-size.yml
index e58e86c5..6d5dcdb6 100644
--- a/.github/workflows/performance-and-size.yml
+++ b/.github/workflows/performance-and-size.yml
@@ -28,21 +28,47 @@ jobs:
- uses: brndnmtthws/rust-action-cargo-binstall@v1
with:
packages: hyperfine
+
+ - name: Install valgrind
+ run: sudo apt-get install valgrind
- name: Build Ezno
run: cargo build --release
env:
CARGO_PROFILE_RELEASE_DEBUG: true
+ - name: Get base ezno
+ if: github.ref_name != 'main'
+ uses: actions/download-artifact@v4
+ continue-on-error: true
+ with:
+ name: latest-checker
+ path: previous-ezno
+
+ - name: Set compilers
+ id: compilers
+ shell: bash
+ run: |
+ if [ -d "previous-ezno" ]; then
+ echo "::notice::Comparing against previous"
+ echo "BINARIES=./target/release/ezno,./previous-ezno/ezno" >> "$GITHUB_OUTPUT"
+ else
+ echo "::notice::Running singularly"
+ echo "BINARIES=./target/release/ezno" >> "$GITHUB_OUTPUT"
+ fi
+
- name: Run checker performance
shell: bash
run: |
# Generate a file which contains everything that Ezno currently implements
- cargo run -p ezno-parser --example code_blocks_to_script ./checker/specification/specification.md --comment-headers --out ./demo.tsx
+ cargo run -p ezno-parser \
+ --example code_blocks_to_script ./checker/specification/specification.md \
+ --comment-headers \
+ --out ./demo.tsx
echo "### Checking
\`\`\`shell
- $(hyperfine -i './target/release/ezno check demo.tsx')
+ $(hyperfine -i -L compiler ${{ steps.compilers.outputs.BINARIES }} '{compiler} check demo.tsx')
\`\`\`" >> $GITHUB_STEP_SUMMARY
echo "
@@ -58,8 +84,8 @@ jobs:
echo "::info::Wrote code to summary"
command_output=$(./target/release/ezno check demo.tsx --timings --max-diagnostics all 2>&1 || true)
+
diagnostics=""; statistics=""; found_splitter=false;
-
while IFS= read -r line; do
if [[ "$line" == "---"* ]]; then found_splitter=true;
elif [[ "$found_splitter" == false ]]; then diagnostics+="$line"$'\n';
@@ -73,16 +99,35 @@ jobs:
$diagnostics
\`\`\`
-
-
- Statistics
-
- \`\`\`
- $statistics
- \`\`\`
-
" >> $GITHUB_STEP_SUMMARY
+ if [ -d "previous-ezno" ]; then
+ OUT=$(./previous-ezno/ezno check demo.tsx --timings --max-diagnostics all 2>&1 || true)
+ base_statistics=$(echo "$OUT" | rg "Diagnostics:" -A 100)
+ echo "
+
+ Statistics
+
+ \`\`\`
+ $statistics
+ \`\`\`
+ against base
+ \`\`\`
+ $base_statistics
+ \`\`\`
+
+
+ " >> $GITHUB_STEP_SUMMARY
+ else
+ echo "
+ Statistics
+
+ \`\`\`
+ $statistics
+ \`\`\`
+ " >> $GITHUB_STEP_SUMMARY
+ fi
+
- name: Run checker performance w/staging
shell: bash
if: github.ref_name != 'main'
@@ -93,7 +138,8 @@ jobs:
cargo run -p ezno-parser --example code_blocks_to_script all.md --comment-headers --out ./all.tsx
./target/release/ezno check all.tsx --timings || true
- hyperfine -i './target/release/ezno check all.tsx'
+
+ hyperfine -i -L compiler ${{ steps.compilers.outputs.BINARIES }} '{compiler} check all.tsx'
echo "::endgroup::"
- name: Run checker performance on large file
@@ -109,23 +155,58 @@ jobs:
done
./target/release/ezno check large.tsx --timings --max-diagnostics 0 || true
- hyperfine -i './target/release/ezno check large.tsx'
+
+ hyperfine -i -L compiler ${{ steps.compilers.outputs.BINARIES }} '{compiler} check large.tsx'
echo "::endgroup::"
+ - name: Valgrind and callgrind
+ shell: bash
+ continue-on-error: true
+ run: |
+ IFS=',' read -ra ITEMS <<< "${{ steps.compilers.outputs.BINARIES }}"
+
+ for compiler in "${ITEMS[@]}"; do
+ echo "::group::Running $compiler"
+
+ echo "::group::Callgrind"
+ valgrind --tool=callgrind --callgrind-out-file=cpu-out $compiler check demo.tsx || true
+ echo "CPU usage:"
+ head -n100 cpu-out
+ echo "::endgroup::"
+
+ echo "::group::Valgrind"
+ valgrind --log-file=memory-out $compiler check demo.tsx || true
+ echo "Memory usage:"
+ cat memory-out
+ echo "::endgroup::"
+
+ echo "::endgroup::"
+ done
+
- name: Run parsing & stringing (minfied) benchmarks
shell: bash
+ continue-on-error: true
run: |
strings=(
"https://esm.sh/v128/react-dom@18.2.0/es2022/react-dom.mjs"
+ "https://esm.sh/v135/typescript@5.3.3/es2022/typescript.mjs"
)
- # Currently broken "https://esm.sh/v135/typescript@5.3.3/es2022/typescript.mjs"
for url in "${strings[@]}"; do
- curl -sS $url > input.js
- echo "--- debug: $url ---"
- cargo run -p ezno-parser --example parse input.js --timings --render-timings
- echo "--- release: $url ---"
- cargo run -p ezno-parser --release --example parse input.js --timings --render-timings
-
- hyperfine "./target/debug/examples/parse input.js" "./target/release/examples/parse input.js"
+ # TODO copy expression
+ curl -sS $url > input.js
+
+ echo "::group::Comparison"
+ hyperfine \
+ -L compiler ${{ steps.compilers.outputs.BINARIES }} \
+ '{compiler} ast-explorer full input.js --timings'
+ echo "::endgroup::"
done
+
+ - name: Upload checker
+ if: github.ref_name == 'main'
+ uses: actions/upload-artifact@v4
+ with:
+ name: latest-checker
+ path: target/release/ezno
+ retention-days: 90
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 193a1505..f7411899 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -58,30 +58,20 @@ jobs:
echo "publish-json-args=$KEY_PAIR_ARGS_JSON" >> $GITHUB_OUTPUT
shell: bash
- - id: get-sponsors
+ # Needed for WASM
+ - name: Get sponsors and contributors
+ id: get-sponsors-and-contributors
run: |
- SPONSORS=$(
- gh api graphql -f query='{
- user(login: "kaleidawave") {
- sponsorshipsAsMaintainer(first: 100, activeOnly: false) {
- edges {
- node {
- sponsor {
- name, login
- }
- }
- }
- }
- }
- }' -q '.data.user.sponsorshipsAsMaintainer.edges | map(.node.sponsor.name // .node.sponsor.login) | join(",")'
- )
-
- CONTRIBUTORS=$(
- gh pr list --state merged --json author | jq 'map(.author.name // .author.login) | unique | join(",")' --raw-output
- )
-
- echo "SPONSORS=$SPONSORS" >> $GITHUB_OUTPUT
- echo "CONTRIBUTORS=$CONTRIBUTORS" >> $GITHUB_OUTPUT
+ SQP='.data.user.sponsorshipsAsMaintainer.edges | map(.node.sponsor.name // .node.sponsor.login) | join(",")'
+ GQL_SQ='{ user(login: "kaleidawave") { sponsorshipsAsMaintainer(first: 100, activeOnly: false) { edges { node { sponsor { name, login } } } } } }'
+ SPONSORS=$(gh api graphql -f query="$GQL_SQ" -q "$SQP")
+
+ CQP='map(.author.name // .author.login) | unique | join(",")'
+ CONTRIBUTORS=$(gh pr list --state merged --json author | jq "$CQP" --raw-output)
+
+ echo "SPONSORS=$SPONSORS" >> "$GITHUB_OUTPUT"
+ echo "CONTRIBUTORS=$CONTRIBUTORS" >> "$GITHUB_OUTPUT"
+ echo "::notice::CONTRIBUTORS=$CONTRIBUTORS and SPONSORS=$SPONSORS"
shell: bash
env:
@@ -94,8 +84,8 @@ jobs:
version: ${{ steps.set-arguments.outputs.publish-json-args }}
crates-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}
env:
- SPONSORS: ${{ steps.get-sponsors.outputs.SPONSORS }}
- CONTRIBUTORS: ${{ steps.get-sponsors.outputs.CONTRIBUTORS }}
+ SPONSORS: ${{ steps.get-sponsors-and-contributors.outputs.SPONSORS }}
+ CONTRIBUTORS: ${{ steps.get-sponsors-and-contributors.outputs.CONTRIBUTORS }}
- name: Add WASM to rustup
if: ${{ inputs.ezno-version != 'none' }}
@@ -115,7 +105,8 @@ jobs:
ls dist
working-directory: src/js-cli-and-library
env:
- SPONSORS: ${{ steps.get-sponsors.outputs.sponsors }}
+ SPONSORS: ${{ steps.get-sponsors-and-contributors.outputs.SPONSORS }}
+ CONTRIBUTORS: ${{ steps.get-sponsors-and-contributors.outputs.CONTRIBUTORS }}
- name: NPM publish (CLI and library)
if: ${{ inputs.ezno-version != 'none' }}
diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 9a2efc66..e45c83c5 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -88,6 +88,7 @@ jobs:
run: |
cargo test
+ # TODO more
curl https://esm.sh/v128/react-dom@18.2.0/es2022/react-dom.mjs > react.js
cargo run -p ezno-parser --example parse react.js
working-directory: parser
@@ -95,12 +96,19 @@ jobs:
- name: Run checker specification
if: (steps.changes.outputs.checker == 'true' && github.event_name != 'pull_request') || github.ref_name == 'main'
run: cargo test
- working-directory: checker/specification
- name: Run checker specification (w/ staging)
if: steps.changes.outputs.checker == 'true' && github.event_name == 'pull_request'
- run: cargo test -F staging
- working-directory: checker/specification
+ run: cargo test -F staging -p ezno-checker-specification
+ env:
+ EZNO_DEBUG: 1
+
+ - name: Run checker specification (just to implement)
+ continue-on-error: true
+ if: steps.changes.outputs.checker == 'true' && github.event_name == 'pull_request'
+ run: |
+ # Aim of this test is to catch anything that may have been fixed in this next commit or any bad regressions (stack overflows)
+ cargo test --no-default-features -F to_implement -p ezno-checker-specification
env:
EZNO_DEBUG: 1
@@ -108,10 +116,9 @@ jobs:
if: steps.changes.outputs.checker == 'true' || github.ref_name == 'main'
run: |
# Test checker with the parser features
- cargo test -F ezno-parser
- working-directory: checker
+ cargo test -F ezno-parser -p ezno-checker
- - name: Run base tests
+ - name: Run CLI and base tests
run: cargo test
extras:
@@ -126,7 +133,7 @@ jobs:
with:
path: ${{ env.CACHE_PATHS }}
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
-
+
- uses: dorny/paths-filter@v3
id: changes
with:
@@ -145,7 +152,7 @@ jobs:
- uses: actions/setup-node@v4
if: steps.changes.outputs.src == 'true' || github.ref_name == 'main'
with:
- node-version: 18
+ node-version: 23
- name: Check parser without extras
if: steps.changes.outputs.parser == 'true'
@@ -167,17 +174,19 @@ jobs:
- name: Build and test WASM
if: steps.changes.outputs.src == 'true' || github.ref_name == 'main'
+ timeout-minutes: 5
run: |
# TODO `cargo check --target wasm32-unknown-unknown --lib` might be good enough
rustup target add wasm32-unknown-unknown
npm ci
npm run build
- npm run run-tests
node ./dist/cli.cjs info
deno run -A ./dist/cli.mjs info
+ npm run run-tests
+
npx -p typescript tsc --strict --pretty ./build/ezno_lib.d.ts
echo "debug checked with TSC"
cargo run -p ezno-parser --example parse ./build/ezno_lib.d.ts --type-definition-module
@@ -190,6 +199,7 @@ jobs:
working-directory: src/js-cli-and-library
shell: bash
+ # WIP
- uses: actions/upload-artifact@v4
if: steps.changes.outputs.src == 'true' || github.ref_name == 'main'
with:
@@ -198,6 +208,7 @@ jobs:
retention-days: 3
fuzzing_parser:
+ if: ${{ github.ref_name == 'main' || !github.event.pull_request.draft || contains(github.event.pull_request.labels.*.name, 'fuzz-me') }}
needs: validity
runs-on: ubuntu-latest
timeout-minutes: 15
@@ -255,6 +266,7 @@ jobs:
working-directory: parser/fuzz
fuzzing_checker:
+ if: ${{ github.ref_name == 'main' || !github.event.pull_request.draft || contains(github.event.pull_request.labels.*.name, 'fuzz-me') }}
needs: validity
runs-on: ubuntu-latest
timeout-minutes: 15
@@ -347,3 +359,15 @@ jobs:
fi
done
shell: bash
+
+ performance-and-size:
+ # WIP
+ runs-on: ubuntu-latest
+ needs: validity
+ steps:
+ - uses: actions/checkout@v4
+ - name: Kick off other workflow if the PR has a label
+ if: github.ref_name != 'main' && contains(github.event.pull_request.labels.*.name, 'compiler-performance')
+ run: gh workflow run performance-and-size.yml --ref "${{ github.head_ref }}"
+ env:
+ GH_TOKEN: ${{ github.token }}
diff --git a/Cargo.lock b/Cargo.lock
index fcfcbdfc..b6be9713 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -39,7 +39,7 @@ dependencies = [
"argh_shared",
"proc-macro2",
"quote",
- "syn 2.0.76",
+ "syn 2.0.87",
]
[[package]]
@@ -53,9 +53,9 @@ dependencies = [
[[package]]
name = "autocfg"
-version = "1.3.0"
+version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"
+checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
[[package]]
name = "base64"
@@ -90,15 +90,15 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
[[package]]
name = "bytemuck"
-version = "1.17.1"
+version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "773d90827bc3feecfb67fab12e24de0749aad83c74b9504ecde46237b5cd24e2"
+checksum = "8334215b81e418a0a7bdb8ef0849474f40bb10c8b71f1c4ed315cff49f32494d"
[[package]]
name = "cc"
-version = "1.1.15"
+version = "1.1.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57b6a275aa2903740dc87da01c62040406b8812552e97129a63ea8850a17c6e6"
+checksum = "baee610e9452a8f6f0a1b6194ec09ff9e2d85dea54432acdae41aa0761c95d70"
dependencies = [
"shlex",
]
@@ -190,7 +190,7 @@ checksum = "42e5ddace13a8459cb452b19e01f59f16d3e2049c8b808f338a13eeadc326e33"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.76",
+ "syn 2.0.87",
]
[[package]]
@@ -211,7 +211,7 @@ dependencies = [
"either_n",
"proc-macro2",
"quote",
- "syn 2.0.76",
+ "syn 2.0.87",
]
[[package]]
@@ -266,7 +266,7 @@ dependencies = [
"proc-macro2",
"quote",
"string-cases",
- "syn 2.0.76",
+ "syn 2.0.87",
]
[[package]]
@@ -384,15 +384,6 @@ dependencies = [
"syn-helpers",
]
-[[package]]
-name = "fastrand"
-version = "1.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be"
-dependencies = [
- "instant",
-]
-
[[package]]
name = "fastrand"
version = "2.1.1"
@@ -511,15 +502,6 @@ dependencies = [
"libc",
]
-[[package]]
-name = "instant"
-version = "0.1.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222"
-dependencies = [
- "cfg-if",
-]
-
[[package]]
name = "iterator-endiate"
version = "0.2.1"
@@ -575,9 +557,9 @@ checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760"
[[package]]
name = "libc"
-version = "0.2.158"
+version = "0.2.161"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439"
+checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1"
[[package]]
name = "libredox"
@@ -727,15 +709,15 @@ dependencies = [
[[package]]
name = "once_cell"
-version = "1.19.0"
+version = "1.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
+checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
[[package]]
name = "openssl"
-version = "0.10.66"
+version = "0.10.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1"
+checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5"
dependencies = [
"bitflags 2.6.0",
"cfg-if",
@@ -754,7 +736,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.76",
+ "syn 2.0.87",
]
[[package]]
@@ -765,9 +747,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-sys"
-version = "0.9.103"
+version = "0.9.104"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6"
+checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741"
dependencies = [
"cc",
"libc",
@@ -777,9 +759,9 @@ dependencies = [
[[package]]
name = "ordered-float"
-version = "4.2.2"
+version = "4.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a91171844676f8c7990ce64959210cd2eaef32c2612c50f9fae9f8aaa6065a6"
+checksum = "c65ee1f9701bf938026630b455d5315f490640234259037edb259798b3bcf85e"
dependencies = [
"num-traits",
]
@@ -833,15 +815,15 @@ dependencies = [
[[package]]
name = "pkg-config"
-version = "0.3.30"
+version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec"
+checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2"
[[package]]
name = "pretty_assertions"
-version = "1.4.0"
+version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66"
+checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d"
dependencies = [
"diff",
"yansi",
@@ -849,9 +831,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.86"
+version = "1.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
+checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e"
dependencies = [
"unicode-ident",
]
@@ -867,18 +849,18 @@ dependencies = [
[[package]]
name = "redox_syscall"
-version = "0.5.3"
+version = "0.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4"
+checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f"
dependencies = [
"bitflags 2.6.0",
]
[[package]]
name = "regress"
-version = "0.10.0"
+version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "16fe0a24af5daaae947294213d2fd2646fbf5e1fbacc1d4ba3e84b2393854842"
+checksum = "1541daf4e4ed43a0922b7969bdc2170178bcacc5dabf7e39bc508a9fa3953a7a"
dependencies = [
"hashbrown",
"memchr",
@@ -886,18 +868,18 @@ dependencies = [
[[package]]
name = "rgb"
-version = "0.8.48"
+version = "0.8.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f86ae463694029097b846d8f99fd5536740602ae00022c0c50c5600720b2f71"
+checksum = "57397d16646700483b67d2dd6511d79318f9d057fdbd21a4066aeac8b41d310a"
dependencies = [
"bytemuck",
]
[[package]]
name = "rustix"
-version = "0.38.35"
+version = "0.38.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a85d50532239da68e9addb745ba38ff4612a242c1c7ceea689c4bc7c2f43c36f"
+checksum = "375116bee2be9ed569afe2154ea6a99dfdffd257f533f187498c2a8f5feaf4ee"
dependencies = [
"bitflags 2.6.0",
"errno",
@@ -923,11 +905,11 @@ dependencies = [
[[package]]
name = "schannel"
-version = "0.1.23"
+version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534"
+checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1"
dependencies = [
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
]
[[package]]
@@ -951,9 +933,9 @@ dependencies = [
[[package]]
name = "security-framework-sys"
-version = "2.11.1"
+version = "2.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75da29fe9b9b08fe9d6b22b5b4bcbc75d8db3aa31e639aa56bb62e9d46bfceaf"
+checksum = "ea4a292869320c0272d7bc55a5a6aafaff59b4f63404a003887b679a2e05b4b6"
dependencies = [
"core-foundation-sys",
"libc",
@@ -961,13 +943,13 @@ dependencies = [
[[package]]
name = "self-replace"
-version = "1.4.0"
+version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f7828a58998685d8bf5a3c5e7a3379a5867289c20828c3ee436280b44b598515"
+checksum = "03ec815b5eab420ab893f63393878d89c90fdd94c0bcc44c07abb8ad95552fb7"
dependencies = [
- "fastrand 1.9.0",
+ "fastrand",
"tempfile",
- "windows-sys 0.48.0",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -992,9 +974,9 @@ dependencies = [
[[package]]
name = "serde"
-version = "1.0.209"
+version = "1.0.214"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09"
+checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5"
dependencies = [
"serde_derive",
]
@@ -1012,13 +994,13 @@ dependencies = [
[[package]]
name = "serde_derive"
-version = "1.0.209"
+version = "1.0.214"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170"
+checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.76",
+ "syn 2.0.87",
]
[[package]]
@@ -1029,14 +1011,14 @@ checksum = "e578a843d40b4189a4d66bba51d7684f57da5bd7c304c64e14bd63efbef49509"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.76",
+ "syn 2.0.87",
]
[[package]]
name = "serde_json"
-version = "1.0.127"
+version = "1.0.132"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad"
+checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03"
dependencies = [
"itoa",
"memchr",
@@ -1094,9 +1076,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.76"
+version = "2.0.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "578e081a14e0cefc3279b0472138c513f37b41a08d5a3cca9b6e4e8ceb6cd525"
+checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
dependencies = [
"proc-macro2",
"quote",
@@ -1112,17 +1094,17 @@ dependencies = [
"either_n",
"proc-macro2",
"quote",
- "syn 2.0.76",
+ "syn 2.0.87",
]
[[package]]
name = "tempfile"
-version = "3.12.0"
+version = "3.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64"
+checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b"
dependencies = [
"cfg-if",
- "fastrand 2.1.1",
+ "fastrand",
"once_cell",
"rustix",
"windows-sys 0.59.0",
@@ -1174,20 +1156,20 @@ dependencies = [
"proc-macro2",
"quote",
"serde_derive_internals",
- "syn 2.0.76",
+ "syn 2.0.87",
]
[[package]]
name = "unicode-ident"
-version = "1.0.12"
+version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
+checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"
[[package]]
name = "unicode-width"
-version = "0.1.13"
+version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d"
+checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
name = "vcpkg"
@@ -1238,7 +1220,7 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
- "syn 2.0.76",
+ "syn 2.0.87",
"wasm-bindgen-shared",
]
@@ -1282,7 +1264,7 @@ checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.76",
+ "syn 2.0.87",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@@ -1495,9 +1477,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "yansi"
-version = "0.5.1"
+version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
+checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"
[[package]]
name = "zerocopy"
@@ -1516,5 +1498,5 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.76",
+ "syn 2.0.87",
]
diff --git a/Cargo.toml b/Cargo.toml
index 8e1d21e6..7f0a5fd1 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -12,7 +12,7 @@ members = [
[package]
name = "ezno"
-description = "A JavaScript type checker and compiler. For use as a library or through the CLI"
+description = "A fast and correct TypeScript type checker and compiler with additional experiments. For use as a library or through the CLI"
authors = ["Ben "]
version = "0.0.22"
edition = "2021"
@@ -48,6 +48,7 @@ pretty_assertions = "1.3.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
simple-json-parser = "0.0.2"
+js-sys = "0.3"
[target.'cfg(not(target_family = "wasm"))'.dependencies]
# For updating binary
diff --git a/README.md b/README.md
index 1a174c96..27dfc88b 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-A JavaScript compiler and TypeScript checker written in Rust with a focus on static analysis and runtime performance.
+A fast and correct TypeScript type checker and compiler with additional experiments
> [!IMPORTANT]
> Ezno is in active development and **does not currently support enough features to check existing projects** (see [blocking issues](https://github.com/kaleidawave/ezno/labels/blocking)). Check out the [getting started guide](./checker/documentation/getting-started.md) for experimenting with what it [currently supports](./checker/specification/specification.md).
@@ -28,6 +28,7 @@ Read more about Ezno (in chronological order)
- [Ezno in '23](https://kaleidawave.github.io/posts/ezno-23/)
- [A preview of the checker](https://kaleidawave.github.io/posts/a-preview-of-the-checker/)
- [The quest continues](https://kaleidawave.github.io/posts/the-quest-continues/)
+- [Sets, types and type checking](https://kaleidawave.github.io/posts/sets-types-and-type-checking/) (*general post*)
---
diff --git a/checker/Cargo.toml b/checker/Cargo.toml
index 242c5608..2461cae6 100644
--- a/checker/Cargo.toml
+++ b/checker/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "ezno-checker"
-description = "A type checker for JavaScript"
+description = "A fast and correct TypeScript type checker with additional experiments"
version = "0.0.17"
license = "MIT"
repository = "https://github.com/kaleidawave/ezno"
diff --git a/checker/examples/run_checker.rs b/checker/examples/run_checker.rs
index aab40341..124c86e8 100644
--- a/checker/examples/run_checker.rs
+++ b/checker/examples/run_checker.rs
@@ -52,7 +52,7 @@ fn main() {
let result = check_project::<_, synthesis::EznoParser>(
entry_points,
type_definition_files,
- resolver,
+ &resolver,
options,
(),
None,
diff --git a/checker/fuzz/fuzz_targets/check_project_naive.rs b/checker/fuzz/fuzz_targets/check_project_naive.rs
index 6a6039d9..014b25ea 100644
--- a/checker/fuzz/fuzz_targets/check_project_naive.rs
+++ b/checker/fuzz/fuzz_targets/check_project_naive.rs
@@ -19,7 +19,7 @@ fn do_fuzz(data: &str) -> Corpus {
let _result = check_project::<_, synthesis::EznoParser>(
vec![root.into()],
type_definition_files,
- |_path: &std::path::Path| Some(input.to_owned()),
+ &|_path: &std::path::Path| Some(input.to_owned()),
options,
(),
None,
diff --git a/checker/specification/build.rs b/checker/specification/build.rs
index e31236a4..1b20193d 100644
--- a/checker/specification/build.rs
+++ b/checker/specification/build.rs
@@ -19,14 +19,18 @@ fn main() -> Result<(), Box> {
if cfg!(feature = "staging") {
let staging = read_to_string("./staging.md")?;
- writeln!(&mut out, "mod staging {{ use super::check_errors; ").unwrap();
+ writeln!(&mut out, "mod staging {{ ").unwrap();
+ writeln!(&mut out, "use super::{{check_expected_diagnostics, TypeCheckOptions}}; ")
+ .unwrap();
markdown_lines_append_test_to_rust(staging.lines().enumerate(), &mut out)?;
writeln!(&mut out, "}}").unwrap();
}
if cfg!(feature = "all") {
let to_implement = read_to_string("./to_implement.md")?;
- writeln!(&mut out, "mod to_implement {{ use super::check_errors; ").unwrap();
+ writeln!(&mut out, "mod to_implement {{ ").unwrap();
+ writeln!(&mut out, "use super::{{check_expected_diagnostics, TypeCheckOptions}}; ")
+ .unwrap();
markdown_lines_append_test_to_rust(to_implement.lines().enumerate(), &mut out)?;
writeln!(&mut out, "}}").unwrap();
}
@@ -60,8 +64,20 @@ fn markdown_lines_append_test_to_rust(
let heading = line.strip_prefix("####").unwrap().trim_start();
let test_title = heading_to_rust_identifier(heading);
- let blocks = {
- let mut blocks = Vec::new();
+ pub struct File<'a> {
+ path: &'a str,
+ code: String,
+ }
+
+ // pub struct Block {
+ // /// Vec for FS tests
+ // files: Vec,
+ // expected_diagnostics: Vec,
+ // options: Vec
+ // }
+
+ let files = {
+ let mut files = Vec::::new();
let mut current_filename = None;
for (_, line) in lines.by_ref() {
// Also handles TSX
@@ -74,10 +90,10 @@ fn markdown_lines_append_test_to_rust(
for (_, line) in lines.by_ref() {
if let Some(path) = line.strip_prefix("// in ") {
if !code.trim().is_empty() {
- blocks.push((
- current_filename.unwrap_or(DEFAULT_FILE_PATH),
- mem::take(&mut code),
- ));
+ files.push(File {
+ path: current_filename.unwrap_or(DEFAULT_FILE_PATH),
+ code: mem::take(&mut code),
+ });
}
current_filename = Some(path);
continue;
@@ -88,40 +104,64 @@ fn markdown_lines_append_test_to_rust(
code.push_str(line);
code.push('\n')
}
- blocks.push((current_filename.unwrap_or(DEFAULT_FILE_PATH), code));
- blocks
+ files.push(File { path: current_filename.unwrap_or(DEFAULT_FILE_PATH), code });
+ files
};
- let errors = {
- let mut errors = Vec::new();
+
+ let (expected_diagnostics, options) = {
+ let mut expected_diagnostics = Vec::new();
+ let mut options = None::>;
for (_, line) in lines.by_ref() {
- if line.starts_with("#") {
+ if let (Some(args), false) = (line.strip_prefix("With "), options.is_some()) {
+ options = Some(args.split(',').collect());
+ } else if line.starts_with("#") {
panic!("block with no diagnostics or break between in {test_title}")
- } else if line.starts_with('-') {
- let error =
- line.strip_prefix("- ").unwrap().replace('\\', "").replace('"', "\\\"");
- errors.push(format!("\"{}\"", error))
- } else if !errors.is_empty() {
+ } else if let Some(diagnostic) = line.strip_prefix("-") {
+ let error = diagnostic.trim().replace('\\', "").replace('"', "\\\"");
+ expected_diagnostics.push(format!("\"{}\"", error))
+ } else if !expected_diagnostics.is_empty() {
break;
}
}
- errors
+ (expected_diagnostics, options)
};
- let errors = errors.join(", ");
+ let expected_diagnostics = expected_diagnostics.join(", ");
let heading_idx = heading_idx + 1;
- let code = blocks
+ // TODO don't allocate
+ let code_as_list = files
.into_iter()
- .map(|(path, content)| format!("(\"{path}\",r#\"{content}\"#),"))
- .fold(String::new(), |mut acc, cur| {
- acc.push_str(&cur);
+ .map(|File { path, code }| format!("(\"{path}\",r#\"{code}\"#),"))
+ .reduce(|mut acc, slice| {
+ acc.push_str(&slice);
acc
- });
+ })
+ .unwrap();
+
+ let options = if let Some(options) = options {
+ let arguments = options
+ .into_iter()
+ .map(|value| format!("{value}: true"))
+ .reduce(|mut acc, slice| {
+ acc.push_str(&slice);
+ acc.push_str(", ");
+ acc
+ })
+ .unwrap();
+ format!("Some(super::TypeCheckOptions {{ {arguments}, ..super::TypeCheckOptions::default() }})")
+ } else {
+ format!("None")
+ };
writeln!(
out,
"#[test] fn {test_title}() {{
- super::check_errors(\"{heading}\", {heading_idx}, &[{code}], &[{errors}])
+ super::check_expected_diagnostics(
+ \"{heading}\", {heading_idx},
+ &[{code_as_list}], &[{expected_diagnostics}],
+ {options}
+ )
}}",
)?;
}
@@ -136,6 +176,6 @@ fn heading_to_rust_identifier(heading: &str) -> String {
heading
.replace("...", "")
.replace([' ', '-', '/', '.', '+'], "_")
- .replace(['*', '\'', '`', '"', '&', '!', '(', ')', ','], "")
+ .replace(['*', '\'', '`', '"', '&', '!', '(', ')', ',', ':'], "")
.to_lowercase()
}
diff --git a/checker/specification/specification.md b/checker/specification/specification.md
index 60dbc932..88a305b0 100644
--- a/checker/specification/specification.md
+++ b/checker/specification/specification.md
@@ -3684,18 +3684,6 @@ box(someNumber) satisfies boolean;
- Expected string, found number
- Expected boolean, found { item: number }
-#### Template literal type restriction
-
-```ts
-type Name = "Ben"
-"test" satisfies `Hello ${Name}`;
-"Hello Ben" satisfies `Hello ${Name}`;
-```
-
-> Should be `Expected "Hello Ben", found "test"`. See #188
-
-- Expected `Hello ${Name}`, found \"test\"
-
#### Template literal type specialisation
> Uses `+` logic behind the scenes
diff --git a/checker/specification/test.rs b/checker/specification/test.rs
index 85166a18..b98dd727 100644
--- a/checker/specification/test.rs
+++ b/checker/specification/test.rs
@@ -11,13 +11,14 @@ use checker::{
diagnostics,
source_map::{Nullable, SourceId},
synthesis::EznoParser,
+ TypeCheckOptions,
};
// This is here as it is used in the included `/specification.rs`
use parser::ASTNode;
mod specification {
- use super::check_errors;
+ use super::{check_expected_diagnostics, TypeCheckOptions};
// from build.rs
include!(concat!(env!("OUT_DIR"), "/specification.rs"));
@@ -37,12 +38,13 @@ const SIMPLE_DTS: Option<&str> = None;
const IN_CI: bool = option_env!("CI").is_some();
/// Called by each test
-fn check_errors(
+fn check_expected_diagnostics(
heading: &'static str,
- _line: usize,
+ line: usize,
// (Path, Content)
code: &[(&'static str, &'static str)],
expected_diagnostics: &[&'static str],
+ type_check_options: Option,
) {
// let global_buffer = Arc::new(Mutex::new(String::new()));
// let old_panic_hook = panic::take_hook();
@@ -59,10 +61,7 @@ fn check_errors(
// })
// });
- // TODO could test these
- let type_check_options = Default::default();
-
- // eprintln!("{:?}", code);
+ let type_check_options = type_check_options.unwrap_or_default();
// let result = panic::catch_unwind(|| {
@@ -95,7 +94,7 @@ fn check_errors(
let result = checker::check_project::<_, EznoParser>(
vec![PathBuf::from("main.tsx")],
type_definition_files,
- resolver,
+ &resolver,
type_check_options,
(),
None,
@@ -125,7 +124,7 @@ fn check_errors(
if diagnostics != expected_diagnostics {
panic!(
- "{}",
+ "In '{heading}' on line {line}, found\n{}",
pretty_assertions::Comparison::new(expected_diagnostics, &diagnostics).to_string()
)
}
diff --git a/checker/src/context/invocation.rs b/checker/src/context/invocation.rs
index b7218ade..c20fa8da 100644
--- a/checker/src/context/invocation.rs
+++ b/checker/src/context/invocation.rs
@@ -69,7 +69,7 @@ pub struct InvocationContext(Vec);
/// TODO want to have type arguments on each of these
pub(crate) enum InvocationKind {
- Conditional(LocalInformation),
+ Conditional(Box),
/// *Unconditional*
///
/// TODO does this need [`LocalInformation`]??
@@ -89,15 +89,13 @@ impl CallCheckingBehavior for InvocationContext {
self.0
.iter_mut()
.rev()
- .find_map(
- |kind| {
- if let InvocationKind::Conditional(info) = kind {
- Some(info)
- } else {
- None
- }
- },
- )
+ .find_map(|kind| -> Option<&mut LocalInformation> {
+ if let InvocationKind::Conditional(info) = kind {
+ Some(&mut *info)
+ } else {
+ None
+ }
+ })
.unwrap_or(&mut environment.info)
}
@@ -142,10 +140,10 @@ impl InvocationContext {
&mut self,
cb: impl for<'a> FnOnce(&'a mut InvocationContext) -> T,
) -> (LocalInformation, T) {
- self.0.push(InvocationKind::Conditional(LocalInformation::default()));
+ self.0.push(InvocationKind::Conditional(Box::default()));
let result = cb(self);
if let Some(InvocationKind::Conditional(info)) = self.0.pop() {
- (info, result)
+ (*info, result)
} else {
unreachable!()
}
diff --git a/checker/src/diagnostics.rs b/checker/src/diagnostics.rs
index 94cecf0a..78943ade 100644
--- a/checker/src/diagnostics.rs
+++ b/checker/src/diagnostics.rs
@@ -114,18 +114,18 @@ pub struct DiagnosticsContainer {
diagnostics: Vec,
// Quick way to check whether a error was added
#[cfg_attr(feature = "serde-serialize", serde(skip_serializing))]
- has_error: bool,
+ contains_error: bool,
}
// TODO the add methods are the same...
impl DiagnosticsContainer {
#[must_use]
pub fn new() -> Self {
- Self { diagnostics: Default::default(), has_error: false }
+ Self { diagnostics: Default::default(), contains_error: false }
}
pub fn add_error>(&mut self, error: T) {
- self.has_error = true;
+ self.contains_error = true;
self.diagnostics.push(error.into());
}
@@ -138,8 +138,8 @@ impl DiagnosticsContainer {
}
#[must_use]
- pub fn has_error(&self) -> bool {
- self.has_error
+ pub fn contains_error(&self) -> bool {
+ self.contains_error
}
pub fn sources(&self) -> impl Iterator- + '_ {
@@ -153,7 +153,7 @@ impl DiagnosticsContainer {
}
pub fn into_result(self) -> Result {
- if self.has_error {
+ if self.contains_error {
Err(self)
} else {
Ok(self)
diff --git a/checker/src/features/regexp.rs b/checker/src/features/regexp.rs
index 7797cef9..297d0334 100644
--- a/checker/src/features/regexp.rs
+++ b/checker/src/features/regexp.rs
@@ -15,7 +15,7 @@ pub struct RegExp {
source: String,
re: Regex,
groups: u32,
- named_group_indices: crate::Map,
+ group_names: Vec>,
flags_unsupported: bool,
used: bool,
}
@@ -65,13 +65,12 @@ impl RegExp {
// let start_pred = compiled_regex.start_pred;
// let loops = compiled_regex.loops;
let groups = compiled_regex.groups + 1;
- let named_group_indices =
- compiled_regex.named_group_indices.iter().map(|(l, r)| (l.clone(), *r)).collect();
+ let group_names = compiled_regex.group_names.to_vec();
// let flags = compiled_regex.flags;
let re = Regex::from(compiled_regex);
- Ok(Self { source, re, groups, named_group_indices, flags_unsupported, used: false })
+ Ok(Self { source, re, groups, group_names, flags_unsupported, used: false })
}
#[must_use]
@@ -262,7 +261,7 @@ impl RegExp {
&mut environment.info,
);
- for name in self.named_group_indices.keys() {
+ for name in &self.group_names {
let key = PropertyKey::String(name.to_string().into());
named_groups_object.append(
diff --git a/checker/src/lib.rs b/checker/src/lib.rs
index 39e60306..d7ee7802 100644
--- a/checker/src/lib.rs
+++ b/checker/src/lib.rs
@@ -459,13 +459,13 @@ impl CheckOutput {
pub fn check_project(
entry_points: Vec,
type_definition_files: Vec,
- resolver: T,
+ resolver: &T,
options: TypeCheckOptions,
parser_requirements: A::ParserRequirements,
existing_files: Option>,
) -> CheckOutput {
let mut checking_data =
- CheckingData::::new(options, &resolver, existing_files, parser_requirements);
+ CheckingData::::new(options, resolver, existing_files, parser_requirements);
let mut root = crate::context::RootContext::new_with_primitive_references();
@@ -478,7 +478,7 @@ pub fn check_project(
add_definition_files_to_root(type_definition_files, &mut root, &mut checking_data);
crate::utilities::unpause_debug_mode();
- if checking_data.diagnostics_container.has_error() {
+ if checking_data.diagnostics_container.contains_error() {
return CheckOutput {
types: checking_data.types,
module_contents: checking_data.modules.files,
@@ -753,7 +753,7 @@ pub fn generate_cache(
add_definition_files_to_root(vec![on.to_path_buf()], &mut root, &mut checking_data);
assert!(
- !checking_data.diagnostics_container.has_error(),
+ !checking_data.diagnostics_container.contains_error(),
"found error in definition file {:#?}",
checking_data.diagnostics_container.get_diagnostics()
);
diff --git a/checker/src/synthesis/interactive.rs b/checker/src/synthesis/interactive.rs
new file mode 100644
index 00000000..54f8a620
--- /dev/null
+++ b/checker/src/synthesis/interactive.rs
@@ -0,0 +1,90 @@
+/// For the REPL in Ezno's CLI
+use std::{mem, path::PathBuf};
+
+use source_map::{FileSystem, MapFileStore, SourceId, WithPathMap};
+
+use crate::{
+ add_definition_files_to_root, types::printing::print_type, CheckingData, DiagnosticsContainer,
+ RootContext, TypeId,
+};
+
+use super::{block::synthesise_block, expressions::synthesise_multiple_expression};
+
+pub struct State<'a, T: crate::ReadFromFS> {
+ checking_data: CheckingData<'a, T, super::EznoParser>,
+ root: RootContext,
+ source: SourceId,
+}
+
+impl<'a, T: crate::ReadFromFS> State<'a, T> {
+ pub fn new(
+ resolver: &'a T,
+ type_definition_files: Vec,
+ ) -> Result)> {
+ let mut root = RootContext::new_with_primitive_references();
+ let mut checking_data =
+ CheckingData::new(Default::default(), resolver, Default::default(), ());
+
+ add_definition_files_to_root(type_definition_files, &mut root, &mut checking_data);
+
+ if checking_data.diagnostics_container.contains_error() {
+ Err((checking_data.diagnostics_container, checking_data.modules.files))
+ } else {
+ let source =
+ checking_data.modules.files.new_source_id("CLI.tsx".into(), String::default());
+ Ok(Self { checking_data, root, source })
+ }
+ }
+
+ pub fn check_item(
+ &mut self,
+ item: &parser::Module,
+ ) -> Result<(Option, DiagnosticsContainer), DiagnosticsContainer> {
+ let (ty, ..) = self.root.new_lexical_environment_fold_into_parent(
+ crate::Scope::PassThrough { source: self.source },
+ &mut self.checking_data,
+ |environment, checking_data| {
+ if let Some(parser::StatementOrDeclaration::Statement(
+ parser::Statement::Expression(expression),
+ )) = item.items.last()
+ {
+ synthesise_block(
+ &item.items[..(item.items.len() - 1)],
+ environment,
+ checking_data,
+ );
+ let result = synthesise_multiple_expression(
+ expression,
+ environment,
+ checking_data,
+ TypeId::ANY_TYPE,
+ );
+ Some(print_type(result, &checking_data.types, environment, false))
+ } else {
+ synthesise_block(&item.items, environment, checking_data);
+ None
+ }
+ },
+ );
+ let dc = mem::take(&mut self.checking_data.diagnostics_container);
+ if dc.contains_error() {
+ Err(dc)
+ } else {
+ Ok((ty, dc))
+ }
+ }
+
+ #[must_use]
+ pub fn get_source_id(&self) -> SourceId {
+ self.source
+ }
+
+ #[must_use]
+ pub fn get_fs_ref(&self) -> &MapFileStore {
+ &self.checking_data.modules.files
+ }
+
+ pub fn get_fs_mut(&mut self) -> &mut MapFileStore {
+ &mut self.checking_data.modules.files
+ }
+}
diff --git a/checker/src/synthesis/interfaces.rs b/checker/src/synthesis/interfaces.rs
index 12863cd4..fed3dc65 100644
--- a/checker/src/synthesis/interfaces.rs
+++ b/checker/src/synthesis/interfaces.rs
@@ -58,7 +58,7 @@ pub(crate) enum InterfaceKey<'a> {
}
pub(crate) enum InterfaceValue {
- Function(FunctionType, Option),
+ Function(Box, Option),
Value(TypeId),
}
@@ -90,16 +90,16 @@ fn register(
let value = match value {
InterfaceValue::Function(function, getter_setter) => match getter_setter {
Some(GetterSetter::Getter) => PropertyValue::Getter(Callable::new_from_function(
- function,
+ *function,
&mut checking_data.types,
)),
Some(GetterSetter::Setter) => PropertyValue::Setter(Callable::new_from_function(
- function,
+ *function,
&mut checking_data.types,
)),
None => {
let function_id = function.id;
- checking_data.types.functions.insert(function.id, function);
+ checking_data.types.functions.insert(function.id, *function);
let ty = Type::FunctionReference(function_id);
PropertyValue::Value(checking_data.types.register_type(ty))
}
@@ -232,7 +232,7 @@ pub(super) fn synthesise_signatures {
- checking_data: CheckingData<'a, T, super::EznoParser>,
- root: RootContext,
- source: SourceId,
- }
-
- impl<'a, T: crate::ReadFromFS> State<'a, T> {
- pub fn new(
- resolver: &'a T,
- type_definition_files: Vec,
- ) -> Result)> {
- let mut root = RootContext::new_with_primitive_references();
- let mut checking_data =
- CheckingData::new(Default::default(), resolver, Default::default(), ());
-
- add_definition_files_to_root(type_definition_files, &mut root, &mut checking_data);
-
- if checking_data.diagnostics_container.has_error() {
- Err((checking_data.diagnostics_container, checking_data.modules.files))
- } else {
- let source =
- checking_data.modules.files.new_source_id("CLI.tsx".into(), String::default());
- Ok(Self { checking_data, root, source })
- }
- }
-
- pub fn check_item(
- &mut self,
- item: &parser::Module,
- ) -> Result<(Option, DiagnosticsContainer), DiagnosticsContainer> {
- let (ty, ..) = self.root.new_lexical_environment_fold_into_parent(
- crate::Scope::PassThrough { source: self.source },
- &mut self.checking_data,
- |environment, checking_data| {
- if let Some(parser::StatementOrDeclaration::Statement(
- parser::Statement::Expression(expression),
- )) = item.items.last()
- {
- synthesise_block(
- &item.items[..(item.items.len() - 1)],
- environment,
- checking_data,
- );
- let result = synthesise_multiple_expression(
- expression,
- environment,
- checking_data,
- TypeId::ANY_TYPE,
- );
- Some(print_type(result, &checking_data.types, environment, false))
- } else {
- synthesise_block(&item.items, environment, checking_data);
- None
- }
- },
- );
- let dc = mem::take(&mut self.checking_data.diagnostics_container);
- if dc.has_error() {
- Err(dc)
- } else {
- Ok((ty, dc))
- }
- }
-
- #[must_use]
- pub fn get_source_id(&self) -> SourceId {
- self.source
- }
-
- #[must_use]
- pub fn get_fs_ref(&self) -> &MapFileStore {
- &self.checking_data.modules.files
- }
-
- pub fn get_fs_mut(&mut self) -> &mut MapFileStore {
- &mut self.checking_data.modules.files
- }
- }
-}
diff --git a/checker/tests/partial_source.rs b/checker/tests/partial_source.rs
index 867b2f96..3b74f56b 100644
--- a/checker/tests/partial_source.rs
+++ b/checker/tests/partial_source.rs
@@ -21,7 +21,7 @@ fn partial_checking() {
let result = check_project::<_, synthesis::EznoParser>(
vec![root.into()],
type_definition_files,
- |_path: &std::path::Path| Some(text.to_owned()),
+ &|_path: &std::path::Path| Some(text.to_owned()),
options,
(),
None,
diff --git a/checker/tests/suggestions.rs b/checker/tests/suggestions.rs
index 4a9f3e41..301b4a52 100644
--- a/checker/tests/suggestions.rs
+++ b/checker/tests/suggestions.rs
@@ -41,7 +41,7 @@ console.log(obj2.proberly);
let result = check_project::<_, ezno_checker::synthesis::EznoParser>(
vec![root.into()],
type_definition_files,
- resolver,
+ &resolver,
options,
(),
None,
diff --git a/checker/tests/type_mappings.rs b/checker/tests/type_mappings.rs
index e6eef489..bccb65c3 100644
--- a/checker/tests/type_mappings.rs
+++ b/checker/tests/type_mappings.rs
@@ -17,7 +17,7 @@ y()";
let result = check_project::<_, synthesis::EznoParser>(
vec![root.into()],
type_definition_files,
- |_path: &std::path::Path| Some(text.to_owned()),
+ &|_path: &std::path::Path| Some(text.to_owned()),
options,
(),
None,
diff --git a/parser/examples/duplicate_block.rs b/parser/examples/duplicate_block.rs
new file mode 100644
index 00000000..21183a2f
--- /dev/null
+++ b/parser/examples/duplicate_block.rs
@@ -0,0 +1,135 @@
+use ezno_parser::{
+ declarations::VariableDeclaration,
+ visiting::{Chain, ImmutableVariableOrProperty, VisitOptions, Visitor, Visitors},
+ ASTNode, Declaration, Expression, Module, StatementOrDeclaration, VariableField,
+};
+use std::collections::{HashMap, HashSet};
+
+struct Offsets {
+ pub offsets: Vec,
+ /// TODO use &str references
+ pub top_level_variables: HashSet,
+ pub top_level_types: HashSet,
+}
+
+/// TODO this could use visiting right?
+/// TODO abstract to library
+/// TODO do for functions and types
+fn get_top_level_identifiers(m: &Module) -> (HashSet, HashSet) {
+ let (mut variables, mut types): (HashSet<_>, HashSet<_>) = Default::default();
+ for item in &m.items {
+ match item {
+ StatementOrDeclaration::Declaration(Declaration::Variable(variable)) => {
+ match variable {
+ VariableDeclaration::ConstDeclaration { declarations, position: _ } => {
+ for declaration in declarations {
+ if let VariableField::Name(identifier) = declaration.name.get_ast_ref()
+ {
+ variables.insert(identifier.as_option_str().unwrap().to_owned());
+ }
+ }
+ }
+ VariableDeclaration::LetDeclaration { declarations, position: _ } => {
+ for declaration in declarations {
+ if let VariableField::Name(identifier) = declaration.name.get_ast_ref()
+ {
+ variables.insert(identifier.as_option_str().unwrap().to_owned());
+ }
+ }
+ }
+ }
+ }
+ StatementOrDeclaration::Declaration(Declaration::Function(function)) => {
+ variables.insert(function.on.name.identifier.as_option_str().unwrap().to_owned());
+ }
+ _ => {}
+ }
+ }
+ (variables, types)
+}
+
+fn main() {
+ let code = "
+let x = 2;
+let y = x + 2;
+let z = 6;
+"
+ .trim();
+
+ // function func() {{ return [x, z] }}
+ let module = Module::from_string(code.into(), Default::default()).unwrap();
+
+ let (top_level_variables, top_level_types) = get_top_level_identifiers(&module);
+
+ let mut visitors = Visitors {
+ expression_visitors: vec![Box::new(NameReferenceFinder)],
+ statement_visitors: Default::default(),
+ variable_visitors: vec![Box::new(NameIndexFinder)],
+ block_visitors: Default::default(),
+ };
+
+ // eprintln!("variables={:#?}", (&top_level_variables, &top_level_types));
+
+ let mut offsets: Offsets =
+ Offsets { offsets: Default::default(), top_level_variables, top_level_types };
+
+ module.visit::(
+ &mut visitors,
+ &mut offsets,
+ &VisitOptions { visit_nested_blocks: true, reverse_statements: false },
+ source_map::Nullable::NULL,
+ );
+
+ // TODO why is this backwards
+ // eprintln!("offsets={:#?}", offsets);
+
+ offsets.offsets.sort_unstable();
+ let mut rest = code.to_owned();
+ for (idx, offset) in offsets.offsets.iter_mut().enumerate().rev() {
+ let current_offset = *offset as usize;
+ rest.insert_str(current_offset, "000");
+ // need to amend offset now that the string has been changed
+ *offset += ("000".len() * idx) as u32;
+ }
+ rest.push('\n');
+
+ let mut total = rest.clone();
+ const SIZE: usize = 10;
+ total.reserve(rest.len() * (SIZE - 1));
+
+ for i in 1..SIZE {
+ let name = format!("{:03}", i);
+ for offset in offsets.offsets.iter().copied() {
+ let range = offset as usize..(offset as usize + 3);
+ rest.replace_range(range, &name);
+ }
+
+ total.push_str(&rest);
+ }
+
+ eprintln!("{}", total);
+}
+
+/// TODO this could be collected in the same process as above
+struct NameIndexFinder;
+
+impl<'a> Visitor, Offsets> for NameIndexFinder {
+ fn visit(&mut self, item: &ImmutableVariableOrProperty<'a>, data: &mut Offsets, chain: &Chain) {
+ if chain.len() == 1 && item.get_variable_name().is_some() {
+ data.offsets.push(item.get_position().end);
+ // data.insert(name.to_owned());
+ }
+ }
+}
+
+struct NameReferenceFinder;
+
+impl Visitor for NameReferenceFinder {
+ fn visit(&mut self, item: &Expression, data: &mut Offsets, _chain: &Chain) {
+ if let Expression::VariableReference(name, position) = item {
+ if data.top_level_variables.contains(name) {
+ data.offsets.push(position.end);
+ }
+ }
+ }
+}
diff --git a/parser/examples/parse.rs b/parser/examples/parse.rs
index cd189381..ccd5b852 100644
--- a/parser/examples/parse.rs
+++ b/parser/examples/parse.rs
@@ -1,8 +1,10 @@
-use std::{collections::VecDeque, time::Instant};
+use std::{collections::VecDeque, path::Path, time::Instant};
use ezno_parser::{ASTNode, Comments, Module, ParseOptions, ToStringOptions};
use source_map::FileSystem;
+type Files = source_map::MapFileStore;
+
fn main() -> Result<(), Box> {
let mut args: VecDeque<_> = std::env::args().skip(1).collect();
let path = args.pop_front().ok_or("expected argument")?;
@@ -18,20 +20,18 @@ fn main() -> Result<(), Box> {
let display_keywords = args.iter().any(|item| item == "--keywords");
let extras = args.iter().any(|item| item == "--extras");
let partial_syntax = args.iter().any(|item| item == "--partial");
- let source_maps = args.iter().any(|item| item == "--source-map");
+ let print_source_maps = args.iter().any(|item| item == "--source-map");
let timings = args.iter().any(|item| item == "--timings");
- let render_timings = args.iter().any(|item| item == "--render-timings");
let type_definition_module = args.iter().any(|item| item == "--type-definition-module");
let type_annotations = !args.iter().any(|item| item == "--no-type-annotations");
let top_level_html = args.iter().any(|item| item == "--top-level-html");
+ let parse_imports = args.iter().any(|item| item == "--parse-imports");
let print_ast = args.iter().any(|item| item == "--ast");
- let render_output = args.iter().any(|item| item == "--render");
+ let to_string_output = args.iter().any(|item| item == "--to-string");
let pretty = args.iter().any(|item| item == "--pretty");
- let now = Instant::now();
-
// TODO temp
const STACK_SIZE_MB: usize = 32;
let parse_options = ParseOptions {
@@ -52,14 +52,48 @@ fn main() -> Result<(), Box> {
..ParseOptions::default()
};
- let mut fs = source_map::MapFileStore::::default();
-
- let source = std::fs::read_to_string(path.clone())?;
+ let mut fs = Files::default();
+
+ let to_string_options = to_string_output.then(|| ToStringOptions {
+ expect_markers: true,
+ include_type_annotations: type_annotations,
+ pretty,
+ comments: if pretty { Comments::All } else { Comments::None },
+ // 60 is temp
+ max_line_length: if pretty { 60 } else { u8::MAX },
+ ..Default::default()
+ });
+
+ parse_path(
+ path.as_ref(),
+ timings,
+ parse_imports,
+ &parse_options,
+ print_ast,
+ print_source_maps,
+ &to_string_options,
+ display_keywords,
+ &mut fs,
+ )
+}
+fn parse_path(
+ path: &Path,
+ timings: bool,
+ parse_imports: bool,
+ parse_options: &ParseOptions,
+ print_ast: bool,
+ print_source_maps: bool,
+ to_string_options: &Option,
+ display_keywords: bool,
+ fs: &mut Files,
+) -> Result<(), Box> {
+ let source = std::fs::read_to_string(path)?;
let source_id = fs.new_source_id(path.into(), source.clone());
- eprintln!("parsing {:?} bytes", source.len());
- let result = Module::from_string_with_options(source.clone(), parse_options, None);
+ eprintln!("parsing {:?} ({:?} bytes)", path.display(), source.len());
+ let now = Instant::now();
+ let result = Module::from_string_with_options(source.clone(), parse_options.clone(), None);
match result {
Ok((module, state)) => {
@@ -70,45 +104,55 @@ fn main() -> Result<(), Box> {
if print_ast {
println!("{module:#?}");
}
- if source_maps || render_output || render_timings {
- let now = Instant::now();
- let to_string_options = ToStringOptions {
- expect_markers: true,
- include_type_annotations: type_annotations,
- pretty,
- comments: if pretty { Comments::All } else { Comments::None },
- // 60 is temp
- max_line_length: if pretty { 60 } else { u8::MAX },
- ..Default::default()
- };
+ if let Some(to_string_options) = to_string_options {
+ let now = Instant::now();
let (output, source_map) =
- module.to_string_with_source_map(&to_string_options, source_id, &fs);
+ module.to_string_with_source_map(to_string_options, source_id, fs);
- if timings || render_timings {
+ if timings {
eprintln!("ToString'ed in: {:?}", now.elapsed());
}
- if source_maps {
- let sm = source_map.unwrap().to_json(&fs);
- println!("{output}\n{sm}");
- }
- if render_output {
- println!("{output}");
+
+ println!("{output}");
+ if print_source_maps {
+ let sm = source_map.unwrap().to_json(fs);
+ println!("{sm}");
}
}
if display_keywords {
- println!("{:?}", state.keyword_positions.unwrap());
+ println!("{:?}", state.keyword_positions.as_ref());
}
+ if parse_imports {
+ for import in state.constant_imports.iter() {
+ // Don't reparse files (+ catches cycles)
+ let resolved_path = path.parent().unwrap().join(import);
+ if fs.get_paths().contains_key(&resolved_path) {
+ continue;
+ }
+ let _ = parse_path(
+ &resolved_path,
+ timings,
+ parse_imports,
+ parse_options,
+ print_ast,
+ print_source_maps,
+ to_string_options,
+ display_keywords,
+ fs,
+ )?;
+ }
+ }
Ok(())
}
Err(parse_err) => {
let mut line_column = parse_err
.position
.with_source(source_id)
- .into_line_column_span::(&fs);
+ .into_line_column_span::(fs);
{
// Editor are one indexed
line_column.line_start += 1;
diff --git a/parser/examples/simple.rs b/parser/examples/simple.rs
new file mode 100644
index 00000000..46475a22
--- /dev/null
+++ b/parser/examples/simple.rs
@@ -0,0 +1,9 @@
+#[allow(unused)]
+use ezno_parser::{ASTNode, Expression, Module};
+
+fn main() {
+ let source = "'Hello World!'".to_owned();
+ let parse_options = Default::default();
+ let result = Expression::from_string_with_options(source.clone(), parse_options, Some(40));
+ eprintln!("{result:#?}");
+}
diff --git a/parser/src/block.rs b/parser/src/block.rs
index 3070dde8..e37b0f20 100644
--- a/parser/src/block.rs
+++ b/parser/src/block.rs
@@ -43,7 +43,8 @@ impl StatementOrDeclaration {
on: ExportDeclaration::Default { .. }
| ExportDeclaration::Item {
exported: Exportable::ImportAll { .. }
- | Exportable::ImportParts { .. } | Exportable::Parts { .. },
+ | Exportable::ImportParts { .. }
+ | Exportable::Parts { .. },
..
},
..
@@ -392,10 +393,11 @@ pub(crate) fn parse_statements_and_declarations(
expect_semi_colon(reader, &state.line_starts, end, options)?
} else if options.retain_blank_lines {
let Token(kind, next) = reader.peek().ok_or_else(crate::parse_lexing_error)?;
- let lines = state.line_starts.byte_indexes_crosses_lines(end as usize, next.0 as usize);
if let TSXToken::EOS = kind {
- lines
+ 1
} else {
+ let lines =
+ state.line_starts.byte_indexes_crosses_lines(end as usize, next.0 as usize);
lines.saturating_sub(1)
}
} else {
diff --git a/parser/src/declarations/mod.rs b/parser/src/declarations/mod.rs
index 53d3344a..080f7dc6 100644
--- a/parser/src/declarations/mod.rs
+++ b/parser/src/declarations/mod.rs
@@ -78,8 +78,10 @@ impl Declaration {
token,
TSXToken::Keyword(
TSXKeyword::Let
- | TSXKeyword::Const | TSXKeyword::Function
- | TSXKeyword::Class | TSXKeyword::Export
+ | TSXKeyword::Const
+ | TSXKeyword::Function
+ | TSXKeyword::Class
+ | TSXKeyword::Export
) | TSXToken::At,
);
@@ -123,7 +125,8 @@ impl Declaration {
reader.peek_n(1),
Some(Token(
TSXToken::OpenBrace
- | TSXToken::Keyword(..) | TSXToken::Identifier(..)
+ | TSXToken::Keyword(..)
+ | TSXToken::Identifier(..)
| TSXToken::StringLiteral(..)
| TSXToken::Multiply,
_
diff --git a/parser/src/extensions/jsx.rs b/parser/src/extensions/jsx.rs
index 88f58aac..ebec2d33 100644
--- a/parser/src/extensions/jsx.rs
+++ b/parser/src/extensions/jsx.rs
@@ -485,7 +485,7 @@ pub fn html_tag_is_self_closing(tag_name: &str) -> bool {
| "hr" | "img"
| "input" | "link"
| "meta" | "param"
- | "source" | "track"
- | "wbr"
+ | "source"
+ | "track" | "wbr"
)
}
diff --git a/parser/src/functions/mod.rs b/parser/src/functions/mod.rs
index 7d86a4bb..84bb66a1 100644
--- a/parser/src/functions/mod.rs
+++ b/parser/src/functions/mod.rs
@@ -644,22 +644,23 @@ pub(crate) fn get_method_name(
state: &mut crate::ParsingState,
options: &ParseOptions,
) -> Result<(MethodHeader, WithComment>), crate::ParseError> {
- let is_named_get_set_or_async =
- matches!(
- reader.peek(),
- Some(Token(TSXToken::Keyword(kw), _))
- if kw.is_in_method_header()
- ) && matches!(
- reader.peek_n(1),
- Some(Token(
- TSXToken::OpenParentheses
- | TSXToken::Colon | TSXToken::OpenChevron
- | TSXToken::CloseBrace
- | TSXToken::Comma | TSXToken::QuestionMark
- | TSXToken::OptionalMember,
- _
- ))
- );
+ let is_named_get_set_or_async = matches!(
+ reader.peek(),
+ Some(Token(TSXToken::Keyword(kw), _))
+ if kw.is_in_method_header()
+ ) && matches!(
+ reader.peek_n(1),
+ Some(Token(
+ TSXToken::OpenParentheses
+ | TSXToken::Colon
+ | TSXToken::OpenChevron
+ | TSXToken::CloseBrace
+ | TSXToken::Comma
+ | TSXToken::QuestionMark
+ | TSXToken::OptionalMember,
+ _
+ ))
+ );
let (function_header, key) = if is_named_get_set_or_async {
let token = reader.next().unwrap();
diff --git a/parser/src/lexer.rs b/parser/src/lexer.rs
index 49c202ed..e6d68467 100644
--- a/parser/src/lexer.rs
+++ b/parser/src/lexer.rs
@@ -47,14 +47,18 @@ fn is_number_delimiter(chr: char) -> bool {
chr,
' ' | ','
| '\n' | '\r'
- | ';' | '+' | '-'
- | '*' | '/' | '&'
- | '|' | '!' | '^'
- | '(' | '{' | '['
- | ')' | '}' | ']'
- | '%' | '=' | ':'
- | '<' | '>' | '?'
- | '"' | '\'' | '`'
+ | ';' | '+'
+ | '-' | '*'
+ | '/' | '&'
+ | '|' | '!'
+ | '^' | '('
+ | '{' | '['
+ | ')' | '}'
+ | ']' | '%'
+ | '=' | ':'
+ | '<' | '>'
+ | '?' | '"'
+ | '\'' | '`'
| '#'
)
}
diff --git a/parser/src/lib.rs b/parser/src/lib.rs
index 7944ac01..93633225 100644
--- a/parser/src/lib.rs
+++ b/parser/src/lib.rs
@@ -266,10 +266,10 @@ pub(crate) fn throw_unexpected_token_with_token(
#[derive(Debug)]
pub struct ParsingState {
- pub(crate) line_starts: source_map::LineStarts,
- pub(crate) length_of_source: u32,
+ pub line_starts: source_map::LineStarts,
+ pub length_of_source: u32,
/// TODO as multithreaded channel + record is dynamic exists
- pub(crate) constant_imports: Vec,
+ pub constant_imports: Vec,
pub keyword_positions: Option,
pub partial_points: Vec,
}
diff --git a/parser/src/modules.rs b/parser/src/modules.rs
index f284b658..777e575f 100644
--- a/parser/src/modules.rs
+++ b/parser/src/modules.rs
@@ -44,7 +44,8 @@ impl ASTNode for Module {
state: &mut crate::ParsingState,
options: &ParseOptions,
) -> ParseResult {
- let span = Span { start: 0, source: (), end: state.length_of_source };
+ let start = reader.peek().map(|t| t.1 .0).unwrap_or_default();
+ let span = Span { start, source: (), end: start + state.length_of_source };
let hashbang_comment = if let Some(crate::Token(TSXToken::HashBangComment(_), _)) =
reader.peek()
{
diff --git a/parser/src/tokens.rs b/parser/src/tokens.rs
index 9c61e8b9..1cedf7ed 100644
--- a/parser/src/tokens.rs
+++ b/parser/src/tokens.rs
@@ -456,7 +456,8 @@ impl TSXToken {
| TSXToken::LogicalAnd
| TSXToken::LogicalOr
| TSXToken::Multiply
- | TSXToken::Add | TSXToken::Subtract
+ | TSXToken::Add
+ | TSXToken::Subtract
| TSXToken::Divide
) || self.is_assignment()
}
@@ -479,10 +480,14 @@ impl TSXToken {
self,
TSXToken::Keyword(
TSXKeyword::Function
- | TSXKeyword::If | TSXKeyword::For
- | TSXKeyword::While | TSXKeyword::Const
- | TSXKeyword::Let | TSXKeyword::Break
- | TSXKeyword::Import | TSXKeyword::Export
+ | TSXKeyword::If
+ | TSXKeyword::For
+ | TSXKeyword::While
+ | TSXKeyword::Const
+ | TSXKeyword::Let
+ | TSXKeyword::Break
+ | TSXKeyword::Import
+ | TSXKeyword::Export
)
)
}
diff --git a/parser/src/types/type_annotations.rs b/parser/src/types/type_annotations.rs
index 271e2b52..68009ab0 100644
--- a/parser/src/types/type_annotations.rs
+++ b/parser/src/types/type_annotations.rs
@@ -479,7 +479,8 @@ impl TypeAnnotation {
TSXToken::CloseParentheses
| TSXToken::CloseBracket
| TSXToken::CloseBrace
- | TSXToken::Comma | TSXToken::OpenChevron
+ | TSXToken::Comma
+ | TSXToken::OpenChevron
) || peek.is_assignment()
|| (start.map_or(false, |start| {
peek.is_statement_or_declaration_start()
diff --git a/src/ast_explorer.rs b/src/ast_explorer.rs
index 7879d421..d8fd7b8a 100644
--- a/src/ast_explorer.rs
+++ b/src/ast_explorer.rs
@@ -1,16 +1,12 @@
#![allow(dead_code)]
-use std::{fs, path::PathBuf};
+use std::path::PathBuf;
use argh::FromArgs;
-use console::style;
use enum_variants_strings::EnumVariantsStrings;
use parser::{source_map::FileSystem, ASTNode, Expression, Module, ToStringOptions};
-use crate::{
- reporting::report_diagnostics_to_cli,
- utilities::{print_to_cli, print_to_cli_without_newline},
-};
+use crate::{reporting::report_diagnostics_to_cli, utilities::print_to_cli};
/// REPL for printing out AST from user input
#[derive(FromArgs, Debug)]
@@ -24,23 +20,25 @@ pub(crate) struct ExplorerArguments {
}
impl ExplorerArguments {
+ #[cfg(target_family = "wasm")]
+ pub(crate) fn run(&mut self, _fs_resolver: &T) {
+ panic!("Cannot run ast-explorer in WASM because of input callback. Consider reimplementing using library");
+ }
+
#[allow(clippy::needless_continue)]
- pub(crate) fn run(
- &mut self,
- fs_resolver: &T,
- cli_input_resolver: U,
- ) {
+ #[cfg(not(target_family = "wasm"))]
+ pub(crate) fn run(&mut self, fs_resolver: &T) {
if let Some(ref file) = self.file {
- let content = fs_resolver.get_content_at_path(file);
+ let content = fs_resolver.read_file(file);
if let Some(content) = content {
- self.nested.run(content, Some(file.to_owned()));
+ self.nested.run(String::from_utf8(content).unwrap(), Some(file.to_owned()));
} else {
eprintln!("Could not find file at {}", file.display());
}
} else {
print_to_cli(format_args!("ezno ast-explorer\nUse #exit to leave. Also #switch-mode *mode name* and #load-file *path*"));
loop {
- let input = cli_input_resolver(self.nested.to_str()).unwrap_or_default();
+ let input = crate::utilities::cli_input_resolver(self.nested.to_str());
if input.is_empty() {
continue;
@@ -55,7 +53,7 @@ impl ExplorerArguments {
}
};
} else if let Some(path) = input.strip_prefix("#load-file ") {
- let input = match fs::read_to_string(path.trim()) {
+ let input = match std::fs::read_to_string(path.trim()) {
Ok(string) => string,
Err(err) => {
print_to_cli(format_args!("{err:?}"));
@@ -80,7 +78,6 @@ pub(crate) enum ExplorerSubCommand {
FullAST(FullASTArgs),
Prettifier(PrettyArgs),
Uglifier(UglifierArgs),
- Lexer(LexerArgs),
}
/// Prints AST for a given expression
@@ -99,6 +96,9 @@ pub(crate) struct FullASTArgs {
/// print results as json
#[argh(switch)]
json: bool,
+ /// just print whether parse was successful
+ #[argh(switch)]
+ check: bool,
}
/// Prettifies source code (full whitespace)
@@ -111,11 +111,6 @@ pub(crate) struct PrettyArgs {}
#[argh(subcommand, name = "uglifier")]
pub(crate) struct UglifierArgs {}
-/// Prints sources with tokens over
-#[derive(FromArgs, Debug, Default)]
-#[argh(subcommand, name = "lexer")]
-pub(crate) struct LexerArgs {}
-
impl ExplorerSubCommand {
pub fn run(&self, input: String, path: Option) {
match self {
@@ -148,11 +143,20 @@ impl ExplorerSubCommand {
ExplorerSubCommand::FullAST(cfg) => {
let mut fs =
parser::source_map::MapFileStore::::default();
- let source_id = fs.new_source_id(path.unwrap_or_default(), input.clone());
+ let source_id = fs.new_source_id(path.clone().unwrap_or_default(), input.clone());
let res = Module::from_string(input, parser::ParseOptions::all_features());
match res {
Ok(res) => {
- if cfg.json {
+ if cfg.check {
+ if let Some(ref path) = path {
+ print_to_cli(format_args!(
+ "{path} parsed successfully",
+ path = path.display()
+ ));
+ } else {
+ print_to_cli(format_args!("Parsed successfully",));
+ }
+ } else if cfg.json {
print_to_cli(format_args!(
"{}",
serde_json::to_string_pretty(&res).unwrap()
@@ -194,28 +198,6 @@ impl ExplorerSubCommand {
.unwrap(),
}
}
- ExplorerSubCommand::Lexer(_) => {
- let mut color = console::Color::Red;
- for (section, with) in parser::script_to_tokens(input) {
- if with {
- let value = style(section).bg(color);
- // Cycle through colors
- color = match color {
- console::Color::Red => console::Color::Green,
- console::Color::Green => console::Color::Yellow,
- console::Color::Yellow => console::Color::Blue,
- console::Color::Blue => console::Color::Magenta,
- console::Color::Magenta => console::Color::Cyan,
- console::Color::Cyan => console::Color::Red,
- _ => unreachable!(),
- };
- print_to_cli_without_newline(format_args!("{value}"));
- } else {
- print_to_cli_without_newline(format_args!("{section}"));
- }
- }
- print_to_cli(format_args!(""));
- }
}
}
}
diff --git a/src/build.rs b/src/build.rs
index 0bc7f94f..e2b4fcb0 100644
--- a/src/build.rs
+++ b/src/build.rs
@@ -1,11 +1,8 @@
-use std::{
- mem,
- path::{Path, PathBuf},
-};
+use std::{collections::HashMap, mem, path::PathBuf};
-use checker::{DiagnosticsContainer, TypeCheckOptions};
+use checker::TypeCheckOptions;
use parser::{
- source_map::{MapFileStore, SourceMap, WithPathMap},
+ source_map::{SourceId, SourceMap, WithPathMap},
ToStringOptions,
};
@@ -17,45 +14,53 @@ pub struct Output {
pub mappings: SourceMap,
}
-#[cfg_attr(target_family = "wasm", derive(serde::Serialize, tsify::Tsify))]
pub struct BuildOutput {
- pub outputs: Vec