diff --git a/.github/workflows/clippy-rustfmt-fix.yml b/.github/workflows/clippy-rustfmt-fix.yml
index 620863af..fc2cfd76 100644
--- a/.github/workflows/clippy-rustfmt-fix.yml
+++ b/.github/workflows/clippy-rustfmt-fix.yml
@@ -12,7 +12,7 @@ env:
target/
jobs:
- publish:
+ run-and-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -23,15 +23,19 @@ jobs:
- name: Run automated fixes
run: |
- cargo clippy --fix
+ # Run clippy on projects
+ cargo clippy --fix --manifest-path ./parser/Cargo.toml --allow-dirty
+ cargo clippy --fix --manifest-path ./checker/Cargo.toml --allow-dirty
+ cargo clippy --fix --allow-dirty
+
+ # Format
cargo fmt
- - name: Commit
+ - name: Commit changes
run: |
- git add .
- git commit -m "Run clippy --fix & formatting"
-
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
-
+
+ git add .
+ git commit -m "Run clippy --fix & cargo fmt"
git push
diff --git a/.github/workflows/github-release.yml b/.github/workflows/github-release.yml
index 86484f2c..f2fd325a 100644
--- a/.github/workflows/github-release.yml
+++ b/.github/workflows/github-release.yml
@@ -21,7 +21,8 @@ jobs:
runs-on: ubuntu-latest
outputs:
- new-ezno-version: ${{ steps.get-version.outputs.new-ezno-version }}
+ LATEST_EZNO_VERSION: ${{ steps.get-version.outputs.LATEST_EZNO_VERSION }}
+ LATEST_EZNO_VERSION_DASH: ${{ steps.get-version.outputs.LATEST_EZNO_VERSION_DASH }}
SPONSORS: ${{ steps.get-sponsors-and-contributors.outputs.SPONSORS }}
CONTRIBUTORS: ${{ steps.get-sponsors-and-contributors.outputs.CONTRIBUTORS }}
@@ -38,43 +39,32 @@ jobs:
git for-each-ref --sort=creatordate --format '%(refname:short)' 'refs/tags/release/ezno-[0-9]*'
echo "::endgroup::"
- TAG=$(git for-each-ref --sort=creatordate --format '%(refname:short)' 'refs/tags/release/ezno-[0-9]*' | tail -n 1)
- echo "Building GH release for ${TAG:13}"
- echo "new-ezno-version=${TAG:13}" >> $GITHUB_OUTPUT
+ TAG=$(git for-each-ref --sort=creatordate --format '%(refname:short)' 'refs/tags/release/ezno-[0-9]*' | tail -n 1 | cut -c 14-)
- # Replace '.' with '-'
- NAME_VERSION=$(echo $VERSION | sed -e "s/\./-/g")
+ echo "::notice::Releasing with found version $TAG"
+ echo "LATEST_EZNO_VERSION=${TAG}" >> "$GITHUB_OUTPUT"
+ echo "LATEST_EZNO_VERSION_DASH=${TAG//./-}" >> "$GITHUB_OUTPUT"
else
- VERSION="${{ inputs.ezno-version }}"
- echo "Building GH release for ${VERSION}"
- echo "new-ezno-version=${VERSION}" >> $GITHUB_OUTPUT
+ TAG="${{ inputs.ezno-version }}"
- # Replace '.' with '-'
- NAME_VERSION=$(echo $VERSION | sed -e "s/\./-/g")
+ echo "::notice::Releasing with specific version $TAG"
+ echo "LATEST_EZNO_VERSION=${TAG}" >> "$GITHUB_OUTPUT"
+ echo "LATEST_EZNO_VERSION_DASH=${TAG//./-}" >> "$GITHUB_OUTPUT"
fi
- - id: get-sponsors-and-contributors
+ - name: Get sponsors and contributors
+ id: get-sponsors-and-contributors
run: |
- SPONSORS=$(gh api graphql -f query='{
- user(login: "kaleidawave") {
- sponsorshipsAsMaintainer(first: 100, activeOnly: false) {
- edges {
- node {
- sponsor {
- name, login
- }
- }
- }
- }
- }
- }' -q '.data.user.sponsorshipsAsMaintainer.edges | map(.node.sponsor.name // .node.sponsor.login) | join(",")')
-
- CONTRIBUTORS=$(
- gh pr list --state merged --json author | jq 'map(.author.name // .author.login) | unique | join(",")' --raw-output
- )
+ SQP='.data.user.sponsorshipsAsMaintainer.edges | map(.node.sponsor.name // .node.sponsor.login) | join(",")'
+ GQL_SQ='{ user(login: "kaleidawave") { sponsorshipsAsMaintainer(first: 100, activeOnly: false) { edges { node { sponsor { name, login } } } } } }'
+ SPONSORS=$(gh api graphql -f query="$GQL_SQ" -q "$SQP")
+
+ CQP='map(.author.name // .author.login) | unique | join(",")'
+ CONTRIBUTORS=$(gh pr list --state merged --json author | jq "$CQP" --raw-output)
- echo "SPONSORS=$SPONSORS" # >> $GITHUB_OUTPUT
- echo "CONTRIBUTORS=$CONTRIBUTORS"
+ echo "SPONSORS=$SPONSORS" >> "$GITHUB_OUTPUT"
+ echo "CONTRIBUTORS=$CONTRIBUTORS" >> "$GITHUB_OUTPUT"
+ echo "::notice::CONTRIBUTORS=$CONTRIBUTORS and SPONSORS=$SPONSORS"
shell: bash
env:
@@ -88,13 +78,16 @@ jobs:
os: [ubuntu-latest, windows-latest]
include:
- os: windows-latest
- executable-extension: .exe
- platform_name: x86_64-pc-windows
+ platform-name: x86_64-pc-windows
+ executable-extension: ".exe"
- os: ubuntu-latest
- platform_name: x86_64-unknown-linux
+ platform-name: x86_64-unknown-linux
runs-on: ${{ matrix.os }}
+ env:
+ LEVEL: release
+
# Important that everything here works in all the above OSes!
steps:
- uses: actions/checkout@v4
@@ -110,10 +103,10 @@ jobs:
SPONSORS: ${{ needs.get-build-info.outputs.SPONSORS }}
CONTRIBUTORS: ${{ needs.get-build-info.outputs.CONTRIBUTORS }}
- - name: Rename and move release assets
+ - name: Rename and move ${{ env.LEVEL }} assets
run: |
mkdir artifacts
- mv target/release/ezno${{ matrix.executable-extension }} "artifacts/ezno-${{ needs.get-build-info.outputs.new-ezno-version }}-${{ matrix.platform_name }}${{ matrix.executable-extension }}"
+ mv "target/${{ env.LEVEL }}/ezno${{ matrix.executable-extension }}" "artifacts/ezno-${{ needs.get-build-info.outputs.LATEST_EZNO_VERSION_DASH }}-${{ matrix.platform-name }}${{ matrix.executable-extension }}"
- uses: actions/upload-artifact@v4
with:
@@ -144,8 +137,8 @@ jobs:
- name: GitHub release
uses: softprops/action-gh-release@v1
with:
- name: "Ezno ${{ needs.get-build-info.outputs.new-ezno-version }}"
- tag_name: "release/ezno-${{ needs.get-build-info.outputs.new-ezno-version }}"
+ name: "Ezno ${{ needs.get-build-info.outputs.LATEST_EZNO_VERSION }}"
+ tag_name: "release/ezno-${{ needs.get-build-info.outputs.LATEST_EZNO_VERSION }}"
body: "For @kaleidawave to update"
files: |
README.md
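
As a reading aid for the tag handling above: `cut -c 14-` strips the 13-character `release/ezno-` prefix and `${TAG//./-}` is plain Bash parameter expansion. A minimal sketch that can be run locally (the tag value is invented for illustration):

```shell
# Hypothetical tag name, for illustration only
TAG_REF='release/ezno-0.0.22'

# Drop the 13-character "release/ezno-" prefix, keeping only the version
TAG=$(echo "$TAG_REF" | cut -c 14-)
echo "$TAG"          # 0.0.22

# Bash parameter expansion: replace every '.' with '-'
echo "${TAG//./-}"   # 0-0-22
```
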
diff --git a/.github/workflows/performance-and-size.yml b/.github/workflows/performance-and-size.yml
index 73115d45..6d5dcdb6 100644
--- a/.github/workflows/performance-and-size.yml
+++ b/.github/workflows/performance-and-size.yml
@@ -45,6 +45,18 @@ jobs:
name: latest-checker
path: previous-ezno
+ - name: Set compilers
+ id: compilers
+ shell: bash
+ run: |
+ if [ -d "previous-ezno" ]; then
+ echo "::notice::Comparing against previous"
+ echo "BINARIES=./target/release/ezno,./previous-ezno/ezno" >> "$GITHUB_OUTPUT"
+ else
+ echo "::notice::Running singularly"
+ echo "BINARIES=./target/release/ezno" >> "$GITHUB_OUTPUT"
+ fi
+
- name: Run checker performance
shell: bash
run: |
@@ -56,7 +68,7 @@ jobs:
echo "### Checking
\`\`\`shell
- $(hyperfine -i './target/release/ezno check demo.tsx')
+ $(hyperfine -i -L compiler ${{ steps.compilers.outputs.BINARIES }} '{compiler} check demo.tsx')
\`\`\`" >> $GITHUB_STEP_SUMMARY
echo "
@@ -87,17 +99,34 @@ jobs:
$diagnostics
\`\`\`
-
-
- Statistics
-
- \`\`\`
- $statistics
- \`\`\`
-
" >> $GITHUB_STEP_SUMMARY
- # Get just statistics: `| rg "Diagnostics:" -A 100`
+ if [ -d "previous-ezno" ]; then
+ OUT=$(./previous-ezno/ezno check demo.tsx --timings --max-diagnostics all 2>&1 || true)
+ base_statistics=$(echo "$OUT" | rg "Diagnostics:" -A 100)
+ echo "
+
+ Statistics
+
+ \`\`\`
+ $statistics
+ \`\`\`
+ against base
+ \`\`\`
+ $base_statistics
+ \`\`\`
+
+ " >> $GITHUB_STEP_SUMMARY
+ else
+ echo "
+ Statistics
+
+ \`\`\`
+ $statistics
+ \`\`\`
+ " >> $GITHUB_STEP_SUMMARY
+ fi
- name: Run checker performance w/staging
shell: bash
@@ -109,7 +138,8 @@ jobs:
cargo run -p ezno-parser --example code_blocks_to_script all.md --comment-headers --out ./all.tsx
./target/release/ezno check all.tsx --timings || true
- hyperfine -i './target/release/ezno check all.tsx'
+
+ hyperfine -i -L compiler ${{ steps.compilers.outputs.BINARIES }} '{compiler} check all.tsx'
echo "::endgroup::"
- name: Run checker performance on large file
@@ -125,40 +155,52 @@ jobs:
done
./target/release/ezno check large.tsx --timings --max-diagnostics 0 || true
- hyperfine -i './target/release/ezno check large.tsx'
+
+ hyperfine -i -L compiler ${{ steps.compilers.outputs.BINARIES }} '{compiler} check large.tsx'
echo "::endgroup::"
- - name: Valgrind
+ - name: Valgrind and callgrind
shell: bash
+ continue-on-error: true
run: |
- echo "::group::Callgrind"
- valgrind --tool=callgrind --callgrind-out-file=cpu-out ./target/release/ezno check demo.tsx | true
- echo "CPU usage:"
- head -n100 cpu-out
- echo "::endgroup::"
+ IFS=',' read -ra ITEMS <<< ${{ steps.compilers.outputs.BINARIES }}
- echo "::group::Valgrind"
- valgrind --log-file=memory-out ./target/release/ezno check demo.tsx | true
- echo "Memory usage:"
- cat memory-out
- echo "::endgroup::"
+ for compiler in ${ITEMS[@]}; do
+ echo "::group::Running $compiler"
+
+ echo "::group::Callgrind"
+ valgrind --tool=callgrind --callgrind-out-file=cpu-out $compiler check demo.tsx | true
+ echo "CPU usage:"
+ head -n100 cpu-out
+ echo "::endgroup::"
+
+ echo "::group::Valgrind"
+ valgrind --log-file=memory-out $compiler check demo.tsx | true
+ echo "Memory usage:"
+ cat memory-out
+ echo "::endgroup::"
+
+ echo "::endgroup::"
+ done
- name: Run parsing & stringing (minified) benchmarks
shell: bash
+ continue-on-error: true
run: |
strings=(
"https://esm.sh/v128/react-dom@18.2.0/es2022/react-dom.mjs"
+ "https://esm.sh/v135/typescript@5.3.3/es2022/typescript.mjs"
)
- # Currently broken "https://esm.sh/v135/typescript@5.3.3/es2022/typescript.mjs"
for url in "${strings[@]}"; do
- curl -sS $url > input.js
- echo "--- debug: $url ---"
- cargo run -p ezno-parser --example parse input.js --timings --render-timings
- echo "--- release: $url ---"
- cargo run -p ezno-parser --release --example parse input.js --timings --render-timings
-
- hyperfine "./target/debug/examples/parse input.js" "./target/release/examples/parse input.js"
+ # TODO copy expression
+ curl -sS $url > input.js
+
+ echo "::group::Comparison"
+ hyperfine \
+ -L compiler ${{ steps.compilers.outputs.BINARIES }} \
+ '{compiler} ast-explorer full input.js --timings'
+ echo "::endgroup::"
done
- name: Upload checker
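
For context on the benchmarking changes above: `hyperfine -L` takes a comma-separated parameter list and runs one benchmark per value, substituting each into the `{compiler}` placeholder. A sketch of what the step effectively runs when a previous binary was downloaded (paths taken from the workflow itself):

```shell
hyperfine -i \
  -L compiler ./target/release/ezno,./previous-ezno/ezno \
  '{compiler} check demo.tsx'
# expands into two benchmarked commands:
#   ./target/release/ezno check demo.tsx
#   ./previous-ezno/ezno check demo.tsx
```
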
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 193a1505..f7411899 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -58,30 +58,20 @@ jobs:
echo "publish-json-args=$KEY_PAIR_ARGS_JSON" >> $GITHUB_OUTPUT
shell: bash
- - id: get-sponsors
+ # Needed for WASM
+ - name: Get sponsors and contributors
+ id: get-sponsors-and-contributors
run: |
- SPONSORS=$(
- gh api graphql -f query='{
- user(login: "kaleidawave") {
- sponsorshipsAsMaintainer(first: 100, activeOnly: false) {
- edges {
- node {
- sponsor {
- name, login
- }
- }
- }
- }
- }
- }' -q '.data.user.sponsorshipsAsMaintainer.edges | map(.node.sponsor.name // .node.sponsor.login) | join(",")'
- )
-
- CONTRIBUTORS=$(
- gh pr list --state merged --json author | jq 'map(.author.name // .author.login) | unique | join(",")' --raw-output
- )
-
- echo "SPONSORS=$SPONSORS" >> $GITHUB_OUTPUT
- echo "CONTRIBUTORS=$CONTRIBUTORS" >> $GITHUB_OUTPUT
+ SQP='.data.user.sponsorshipsAsMaintainer.edges | map(.node.sponsor.name // .node.sponsor.login) | join(",")'
+ GQL_SQ='{ user(login: "kaleidawave") { sponsorshipsAsMaintainer(first: 100, activeOnly: false) { edges { node { sponsor { name, login } } } } } }'
+ SPONSORS=$(gh api graphql -f query="$GQL_SQ" -q "$SQP")
+
+ CQP='map(.author.name // .author.login) | unique | join(",")'
+ CONTRIBUTORS=$(gh pr list --state merged --json author | jq "$CQP" --raw-output)
+
+ echo "SPONSORS=$SPONSORS" >> "$GITHUB_OUTPUT"
+ echo "CONTRIBUTORS=$CONTRIBUTORS" >> "$GITHUB_OUTPUT"
+ echo "::notice::CONTRIBUTORS=$CONTRIBUTORS and SPONSORS=$SPONSORS"
shell: bash
env:
@@ -94,8 +84,8 @@ jobs:
version: ${{ steps.set-arguments.outputs.publish-json-args }}
crates-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}
env:
- SPONSORS: ${{ steps.get-sponsors.outputs.SPONSORS }}
- CONTRIBUTORS: ${{ steps.get-sponsors.outputs.CONTRIBUTORS }}
+ SPONSORS: ${{ steps.get-sponsors-and-contributors.outputs.SPONSORS }}
+ CONTRIBUTORS: ${{ steps.get-sponsors-and-contributors.outputs.CONTRIBUTORS }}
- name: Add WASM to rustup
if: ${{ inputs.ezno-version != 'none' }}
@@ -115,7 +105,8 @@ jobs:
ls dist
working-directory: src/js-cli-and-library
env:
- SPONSORS: ${{ steps.get-sponsors.outputs.sponsors }}
+ SPONSORS: ${{ steps.get-sponsors-and-contributors.outputs.SPONSORS }}
+ CONTRIBUTORS: ${{ steps.get-sponsors-and-contributors.outputs.CONTRIBUTORS }}
- name: NPM publish (CLI and library)
if: ${{ inputs.ezno-version != 'none' }}
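
The sponsors/contributors step above stores the jq programs in variables before applying them; the `//` operator falls back to the login when the display name is null and `unique` de-duplicates. A small sketch against made-up `gh pr list --json author` output:

```shell
echo '[{"author":{"name":"Ada Lovelace","login":"ada"}},
       {"author":{"name":null,"login":"ghost"}},
       {"author":{"name":"Ada Lovelace","login":"ada"}}]' \
  | jq 'map(.author.name // .author.login) | unique | join(",")' --raw-output
# prints: Ada Lovelace,ghost
```
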
diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index b2f348df..e45c83c5 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -152,7 +152,7 @@ jobs:
- uses: actions/setup-node@v4
if: steps.changes.outputs.src == 'true' || github.ref_name == 'main'
with:
- node-version: 18
+ node-version: 23
- name: Check parser without extras
if: steps.changes.outputs.parser == 'true'
@@ -174,17 +174,19 @@ jobs:
- name: Build and test WASM
if: steps.changes.outputs.src == 'true' || github.ref_name == 'main'
+ timeout-minutes: 5
run: |
# TODO `cargo check --target wasm32-unknown-unknown --lib` might be good enough
rustup target add wasm32-unknown-unknown
npm ci
npm run build
- npm run run-tests
node ./dist/cli.cjs info
deno run -A ./dist/cli.mjs info
+ npm run run-tests
+
npx -p typescript tsc --strict --pretty ./build/ezno_lib.d.ts
echo "debug checked with TSC"
cargo run -p ezno-parser --example parse ./build/ezno_lib.d.ts --type-definition-module
@@ -206,6 +208,7 @@ jobs:
retention-days: 3
fuzzing_parser:
+ if: ${{ github.ref_name == 'main' || !github.event.pull_request.draft || contains(github.event.pull_request.labels.*.name, 'fuzz-me') }}
needs: validity
runs-on: ubuntu-latest
timeout-minutes: 15
@@ -263,6 +266,7 @@ jobs:
working-directory: parser/fuzz
fuzzing_checker:
+ if: ${{ github.ref_name == 'main' || !github.event.pull_request.draft || contains(github.event.pull_request.labels.*.name, 'fuzz-me') }}
needs: validity
runs-on: ubuntu-latest
timeout-minutes: 15
@@ -359,9 +363,11 @@ jobs:
performance-and-size:
# WIP
runs-on: ubuntu-latest
+ needs: validity
steps:
+ - uses: actions/checkout@v4
- name: Kick off other workflow if the PR has a label
- if: contains(github.event.pull_request.labels.*.name, 'compiler-performance')
- run: |
- echo ${{ github.event.after }}
- gh workflow run performance-and-size.yml --ref ${{ github.event.after }}
\ No newline at end of file
+ if: github.ref_name != 'main' && contains(github.event.pull_request.labels.*.name, 'compiler-performance')
+ run: gh workflow run performance-and-size.yml --ref "${{ github.head_ref }}"
+ env:
+ GH_TOKEN: ${{ github.token }}
diff --git a/Cargo.lock b/Cargo.lock
index 42ea9d81..b6be9713 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -39,7 +39,7 @@ dependencies = [
"argh_shared",
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.87",
]
[[package]]
@@ -90,15 +90,15 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
[[package]]
name = "bytemuck"
-version = "1.18.0"
+version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94bbb0ad554ad961ddc5da507a12a29b14e4ae5bda06b19f575a3e6079d2e2ae"
+checksum = "8334215b81e418a0a7bdb8ef0849474f40bb10c8b71f1c4ed315cff49f32494d"
[[package]]
name = "cc"
-version = "1.1.25"
+version = "1.1.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8d9e0b4957f635b8d3da819d0db5603620467ecf1f692d22a8c2717ce27e6d8"
+checksum = "baee610e9452a8f6f0a1b6194ec09ff9e2d85dea54432acdae41aa0761c95d70"
dependencies = [
"shlex",
]
@@ -190,7 +190,7 @@ checksum = "42e5ddace13a8459cb452b19e01f59f16d3e2049c8b808f338a13eeadc326e33"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.87",
]
[[package]]
@@ -211,7 +211,7 @@ dependencies = [
"either_n",
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.87",
]
[[package]]
@@ -266,7 +266,7 @@ dependencies = [
"proc-macro2",
"quote",
"string-cases",
- "syn 2.0.79",
+ "syn 2.0.87",
]
[[package]]
@@ -296,6 +296,7 @@ dependencies = [
"multiline-term-input",
"native-tls",
"notify",
+ "notify-debouncer-full",
"pretty_assertions",
"self-replace",
"serde",
@@ -389,6 +390,15 @@ version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6"
+[[package]]
+name = "file-id"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6584280525fb2059cba3db2c04abf947a1a29a45ddae89f3870f8281704fafc9"
+dependencies = [
+ "windows-sys 0.48.0",
+]
+
[[package]]
name = "filetime"
version = "0.2.25"
@@ -547,9 +557,9 @@ checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760"
[[package]]
name = "libc"
-version = "0.2.159"
+version = "0.2.161"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5"
+checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1"
[[package]]
name = "libredox"
@@ -568,6 +578,16 @@ version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89"
+[[package]]
+name = "lock_api"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17"
+dependencies = [
+ "autocfg",
+ "scopeguard",
+]
+
[[package]]
name = "log"
version = "0.4.22"
@@ -664,6 +684,20 @@ dependencies = [
"windows-sys 0.48.0",
]
+[[package]]
+name = "notify-debouncer-full"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49f5dab59c348b9b50cf7f261960a20e389feb2713636399cd9082cd4b536154"
+dependencies = [
+ "crossbeam-channel",
+ "file-id",
+ "log",
+ "notify",
+ "parking_lot",
+ "walkdir",
+]
+
[[package]]
name = "num-traits"
version = "0.2.19"
@@ -675,18 +709,15 @@ dependencies = [
[[package]]
name = "once_cell"
-version = "1.20.1"
+version = "1.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "82881c4be219ab5faaf2ad5e5e5ecdff8c66bd7402ca3160975c93b24961afd1"
-dependencies = [
- "portable-atomic",
-]
+checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
[[package]]
name = "openssl"
-version = "0.10.66"
+version = "0.10.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1"
+checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5"
dependencies = [
"bitflags 2.6.0",
"cfg-if",
@@ -705,7 +736,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.87",
]
[[package]]
@@ -716,9 +747,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-sys"
-version = "0.9.103"
+version = "0.9.104"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6"
+checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741"
dependencies = [
"cc",
"libc",
@@ -728,13 +759,36 @@ dependencies = [
[[package]]
name = "ordered-float"
-version = "4.3.0"
+version = "4.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44d501f1a72f71d3c063a6bbc8f7271fa73aa09fe5d6283b6571e2ed176a2537"
+checksum = "c65ee1f9701bf938026630b455d5315f490640234259037edb259798b3bcf85e"
dependencies = [
"num-traits",
]
+[[package]]
+name = "parking_lot"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
+dependencies = [
+ "lock_api",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "windows-targets 0.52.6",
+]
+
[[package]]
name = "paste"
version = "1.0.15"
@@ -765,12 +819,6 @@ version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2"
-[[package]]
-name = "portable-atomic"
-version = "1.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2"
-
[[package]]
name = "pretty_assertions"
version = "1.4.1"
@@ -783,9 +831,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.86"
+version = "1.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
+checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e"
dependencies = [
"unicode-ident",
]
@@ -829,9 +877,9 @@ dependencies = [
[[package]]
name = "rustix"
-version = "0.38.37"
+version = "0.38.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811"
+checksum = "375116bee2be9ed569afe2154ea6a99dfdffd257f533f187498c2a8f5feaf4ee"
dependencies = [
"bitflags 2.6.0",
"errno",
@@ -857,13 +905,19 @@ dependencies = [
[[package]]
name = "schannel"
-version = "0.1.24"
+version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e9aaafd5a2b6e3d657ff009d82fbd630b6bd54dd4eb06f21693925cdf80f9b8b"
+checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1"
dependencies = [
"windows-sys 0.59.0",
]
+[[package]]
+name = "scopeguard"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+
[[package]]
name = "security-framework"
version = "2.11.1"
@@ -920,9 +974,9 @@ dependencies = [
[[package]]
name = "serde"
-version = "1.0.210"
+version = "1.0.214"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a"
+checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5"
dependencies = [
"serde_derive",
]
@@ -940,13 +994,13 @@ dependencies = [
[[package]]
name = "serde_derive"
-version = "1.0.210"
+version = "1.0.214"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
+checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.87",
]
[[package]]
@@ -957,14 +1011,14 @@ checksum = "e578a843d40b4189a4d66bba51d7684f57da5bd7c304c64e14bd63efbef49509"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.87",
]
[[package]]
name = "serde_json"
-version = "1.0.128"
+version = "1.0.132"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8"
+checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03"
dependencies = [
"itoa",
"memchr",
@@ -984,6 +1038,12 @@ version = "0.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "837c5f8866aeabd3ac8addcf50ef1e2779fe8c2a8d74bdd97102bdfe5605c629"
+[[package]]
+name = "smallvec"
+version = "1.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
+
[[package]]
name = "source-map"
version = "0.15.0"
@@ -1016,9 +1076,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.79"
+version = "2.0.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590"
+checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
dependencies = [
"proc-macro2",
"quote",
@@ -1034,7 +1094,7 @@ dependencies = [
"either_n",
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.87",
]
[[package]]
@@ -1096,7 +1156,7 @@ dependencies = [
"proc-macro2",
"quote",
"serde_derive_internals",
- "syn 2.0.79",
+ "syn 2.0.87",
]
[[package]]
@@ -1160,7 +1220,7 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.87",
"wasm-bindgen-shared",
]
@@ -1204,7 +1264,7 @@ checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.87",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@@ -1438,5 +1498,5 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.79",
+ "syn 2.0.87",
]
diff --git a/Cargo.toml b/Cargo.toml
index e3a30122..7f0a5fd1 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -12,7 +12,7 @@ members = [
[package]
name = "ezno"
-description = "A JavaScript type checker and compiler. For use as a library or through the CLI"
+description = "A fast and correct TypeScript type checker and compiler with additional experiments. For use as a library or through the CLI"
authors = ["Ben "]
version = "0.0.22"
edition = "2021"
@@ -43,12 +43,12 @@ base64 = "0.21"
console = "0.15"
codespan-reporting = "0.11"
enum-variants-strings = "0.3"
-glob = "0.3"
# For `StrComparison` for string comparison
pretty_assertions = "1.3.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
simple-json-parser = "0.0.2"
+js-sys = "0.3"
[target.'cfg(not(target_family = "wasm"))'.dependencies]
# For updating binary
@@ -59,6 +59,8 @@ native-tls = "0.2.11"
multiline-term-input = "0.1.0"
# For watching files
notify = "6.1.0"
+notify-debouncer-full = "0.3.1"
+glob = "0.3"
[dependencies.checker]
path = "./checker"
diff --git a/README.md b/README.md
index 364aca7b..f0c1165e 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-A JavaScript compiler and TypeScript checker written in Rust with a focus on static analysis and runtime performance.
+A fast and correct TypeScript type checker and compiler with additional experiments
> [!IMPORTANT]
> Ezno is in active development and **does not currently support enough features to check existing projects** (see [blocking issues](https://github.com/kaleidawave/ezno/labels/blocking)). Check out the [getting started guide](./checker/documentation/getting-started.md) for experimenting with what it [currently supports](./checker/specification/specification.md).
@@ -28,6 +28,7 @@ Read more about Ezno (in chronological order)
- [Ezno in '23](https://kaleidawave.github.io/posts/ezno-23/)
- [A preview of the checker](https://kaleidawave.github.io/posts/a-preview-of-the-checker/)
- [The quest continues](https://kaleidawave.github.io/posts/the-quest-continues/)
+- [Sets, types and type checking](https://kaleidawave.github.io/posts/sets-types-and-type-checking/) (*general post*)
---
diff --git a/checker/Cargo.toml b/checker/Cargo.toml
index 9832588e..2b7614b9 100644
--- a/checker/Cargo.toml
+++ b/checker/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "ezno-checker"
-description = "A type checker for JavaScript"
+description = "A fast and correct TypeScript type checker with additional experiments"
version = "0.0.17"
license = "MIT"
repository = "https://github.com/kaleidawave/ezno"
diff --git a/checker/definitions/overrides.d.ts b/checker/definitions/overrides.d.ts
index 2cfb280f..57e15661 100644
--- a/checker/definitions/overrides.d.ts
+++ b/checker/definitions/overrides.d.ts
@@ -369,9 +369,21 @@ declare class Object {
@Constant
static freeze(on: object): object;
+ @Constant
+ static seal(on: object): object;
+
+ @Constant
+ static preventExtensions(on: object): object;
+
@Constant
static isFrozen(on: object): boolean;
+ @Constant
+ static isSealed(on: object): boolean;
+
+ @Constant
+ static isExtensible(on: object): boolean;
+
// TODO defineProperties via body (not constant)
@Constant
static defineProperty(on: object, property: string, discriminator: PropertyDescriptor): boolean;
diff --git a/checker/definitions/simple.d.ts b/checker/definitions/simple.d.ts
index 7ef3ae1a..01aed46e 100644
--- a/checker/definitions/simple.d.ts
+++ b/checker/definitions/simple.d.ts
@@ -372,9 +372,21 @@ declare class Object {
@Constant
static freeze(on: object): object;
+ @Constant
+ static seal(on: object): object;
+
+ @Constant
+ static preventExtensions(on: object): object;
+
@Constant
static isFrozen(on: object): boolean;
+ @Constant
+ static isSealed(on: object): boolean;
+
+ @Constant
+ static isExtensible(on: object): boolean;
+
// TODO defineProperties via body (not constant)
@Constant
static defineProperty(on: object, property: string, discriminator: PropertyDescriptor): boolean;
diff --git a/checker/examples/run_checker.rs b/checker/examples/run_checker.rs
index f81081eb..74581100 100644
--- a/checker/examples/run_checker.rs
+++ b/checker/examples/run_checker.rs
@@ -54,7 +54,7 @@ fn main() {
let result = check_project::<_, synthesis::EznoParser>(
entry_points,
type_definition_files,
- resolver,
+ &resolver,
options,
(),
None,
diff --git a/checker/fuzz/fuzz_targets/check_project_naive.rs b/checker/fuzz/fuzz_targets/check_project_naive.rs
index 6a6039d9..014b25ea 100644
--- a/checker/fuzz/fuzz_targets/check_project_naive.rs
+++ b/checker/fuzz/fuzz_targets/check_project_naive.rs
@@ -19,7 +19,7 @@ fn do_fuzz(data: &str) -> Corpus {
let _result = check_project::<_, synthesis::EznoParser>(
vec![root.into()],
type_definition_files,
- |_path: &std::path::Path| Some(input.to_owned()),
+ &|_path: &std::path::Path| Some(input.to_owned()),
options,
(),
None,
diff --git a/checker/specification/specification.md b/checker/specification/specification.md
index 74c7194f..6e26cb6b 100644
--- a/checker/specification/specification.md
+++ b/checker/specification/specification.md
@@ -529,22 +529,6 @@ keys satisfies boolean
- Expected boolean, found "nbd"
-#### `Object.freeze`
-
-> TODO seal & preventExtensions
-
-```ts
-const obj = {}
-let result = Object.freeze(obj);
-(obj === result) satisfies true;
-obj.property = 2;
-Object.isFrozen(obj) satisfies true;
-```
-
-> TODO maybe error should say that whole object is frozen
-
-- Cannot write to property 'property'
-
#### `Object.defineProperty` writable
> TODO defineProperties
@@ -634,7 +618,86 @@ obj satisfies string;
```
- Expected string, found { a: 1, b: 2, c: 3 }
-s
+
+#### `Object.freeze`
+
+> When `Object.freeze` is called, the object is also treated as sealed, so `Object.isSealed` is inferred as `true`
+
+```ts
+const obj = {}
+let result = Object.freeze(obj);
+(obj === result) satisfies true;
+obj.property = 2;
+Object.isSealed(obj) satisfies true;
+```
+
+- Cannot write to property 'property'
+
+#### `Object.seal`
+
+> When `Object.seal` is called, `isSealed` is inferred as `true` while `isFrozen` stays `false`: existing properties can still be written, but new ones cannot be added
+
+```ts
+const obj = { a: 2 }
+let result = Object.seal(obj);
+(obj === result) satisfies true;
+
+// Allowed
+obj.a = 4;
+// Not allowed
+obj.property = 2;
+
+Object.isSealed(obj) satisfies true;
+Object.isFrozen(obj) satisfies false;
+```
+
+- Cannot write to property 'property'
+
+#### `Object.preventExtensions`
+
+> When `Object.preventExtensions` is called, `isFrozen` and `isSealed` both stay `false`: existing properties can still be written, but new ones cannot be added
+
+```ts
+const obj = { a: 2 }
+let result = Object.preventExtensions(obj);
+(obj === result) satisfies true;
+
+// Allowed
+obj.a = 4;
+// Not allowed
+obj.property = 2;
+
+Object.isFrozen(obj) satisfies false;
+Object.isSealed(obj) satisfies false;
+```
+
+- Cannot write to property 'property'
+
+#### `Object.isExtensible`
+
+> `Object.isExtensible` returns `false` for an object that has had `Object.seal`, `Object.freeze` or `Object.preventExtensions` applied to it, and `true` otherwise
+
+```ts
+{
+ const obj = {}
+ Object.isExtensible(obj) satisfies true;
+ Object.preventExtensions(obj);
+ Object.isExtensible(obj) satisfies false;
+}
+{
+ const obj = {}
+ Object.seal(obj);
+ Object.isExtensible(obj) satisfies false;
+}
+{
+ const obj = {}
+ Object.freeze(obj);
+ Object.isExtensible(obj) satisfies 5;
+}
+```
+
+- Expected 5, found false
+
### Excess properties
> The following work through the same mechanism as forward inference
@@ -2379,7 +2442,7 @@ fakeRead(array1)
#### Always known math
```ts
-// True regardless of
+// True regardless of
function func(a: number) { return a ** 0 }
func satisfies string;
@@ -3306,8 +3369,13 @@ type X = string;
type X = number;
const a: X = "hello world";
}
+
+function func() {}
+
+type B = YEA;
```
+- Could not find type 'YEA'
- Type "hello world" is not assignable to type X
#### Type has no generics
@@ -3622,18 +3690,6 @@ box(someNumber) satisfies boolean;
- Expected string, found number
- Expected boolean, found { item: number }
-#### Template literal type restriction
-
-```ts
-type Name = "Ben"
-"test" satisfies `Hello ${Name}`;
-"Hello Ben" satisfies `Hello ${Name}`;
-```
-
-> Should be `Expected "Hello Ben", found "test"`. See #188
-
-- Expected string, found "test"
-
#### Template literal type specialisation
> Uses `+` logic behind the scenes
@@ -3689,14 +3745,18 @@ interface X {
#### Template literal types
+> Last one tests printing
+
```ts
type Introduction = `Hello ${string}`;
const first: Introduction = "Hello Ben";
const second: Introduction = "Hi Ben";
+const third: `Hiya ${string}` = "Hello Ben";
```
- Type "Hi Ben" is not assignable to type Introduction
+- Type "Hello Ben" is not assignable to type `Hiya ${string}`
#### Assigning to types as keys
@@ -4035,7 +4095,7 @@ x.property_a satisfies number;
x.property_b
```
-- No property 'property_b' on { [string]: X[keyof X & string] }
+- No property 'property_b' on { [`property_${keyof X & string}`]: X[keyof X & string] }
### Readonly and `as const`
diff --git a/checker/specification/test.rs b/checker/specification/test.rs
index 10a662d5..b98dd727 100644
--- a/checker/specification/test.rs
+++ b/checker/specification/test.rs
@@ -94,7 +94,7 @@ fn check_expected_diagnostics(
let result = checker::check_project::<_, EznoParser>(
vec![PathBuf::from("main.tsx")],
type_definition_files,
- resolver,
+ &resolver,
type_check_options,
(),
None,
diff --git a/checker/src/context/information.rs b/checker/src/context/information.rs
index 9ec159c9..67ba6901 100644
--- a/checker/src/context/information.rs
+++ b/checker/src/context/information.rs
@@ -1,5 +1,5 @@
use source_map::SpanWithSource;
-use std::collections::{HashMap, HashSet};
+use std::collections::HashMap;
use crate::{
events::{Event, RootReference},
@@ -32,8 +32,8 @@ pub struct LocalInformation {
/// `ContextId` is a mini context
pub(crate) closure_current_values: HashMap<(ClosureId, RootReference), TypeId>,
- /// Not writeable, `TypeError: Cannot add property t, object is not extensible`. TODO conditional ?
- pub(crate) frozen: HashSet<TypeId>,
+ /// Not writeable, `TypeError: Cannot add property, object is not extensible`. TODO conditional ?
+ pub(crate) frozen: HashMap<TypeId, ObjectProtectionState>,
/// Object type (LHS), must always be RHS
///
@@ -52,6 +52,13 @@ pub struct LocalInformation {
pub(crate) value_of_this: ThisValue,
}
+#[derive(Debug, Clone, Copy, binary_serialize_derive::BinarySerializable)]
+pub enum ObjectProtectionState {
+ Frozen,
+ Sealed,
+ NoExtensions,
+}
+
#[derive(Debug, Default, binary_serialize_derive::BinarySerializable, Clone)]
pub(crate) enum ReturnState {
#[default]
@@ -228,7 +235,7 @@ impl LocalInformation {
.extend(other.current_properties.iter().map(|(l, r)| (*l, r.clone())));
self.closure_current_values
.extend(other.closure_current_values.iter().map(|(l, r)| (l.clone(), *r)));
- self.frozen.extend(other.frozen.iter().clone());
+ self.frozen.extend(other.frozen.clone());
self.narrowed_values.extend(other.narrowed_values.iter().copied());
self.state = other.state.clone();
}
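
To keep the three states introduced above straight while reading the rest of the diff: they differ only in which writes they block and how the `is*` queries answer. A standalone, illustrative Rust sketch of the rules that `constant_functions.rs` and `properties/assignment.rs` encode later in this diff (the function names here are not the checker's API):

```rust
#[allow(dead_code)]
#[derive(Clone, Copy)]
enum ObjectProtectionState {
    Frozen,
    Sealed,
    NoExtensions,
}

/// Assigning to an *existing* property: only `Object.freeze` blocks it
fn can_write_existing(state: Option<ObjectProtectionState>) -> bool {
    !matches!(state, Some(ObjectProtectionState::Frozen))
}

/// Adding a *new* property: any protection state blocks it
fn can_add_new(state: Option<ObjectProtectionState>) -> bool {
    state.is_none()
}

/// `Object.isSealed` answers true for frozen and sealed objects
fn is_sealed(state: Option<ObjectProtectionState>) -> bool {
    matches!(state, Some(ObjectProtectionState::Frozen | ObjectProtectionState::Sealed))
}

/// `Object.isExtensible` answers false as soon as any state is set
fn is_extensible(state: Option<ObjectProtectionState>) -> bool {
    state.is_none()
}

fn main() {
    let sealed = Some(ObjectProtectionState::Sealed);
    assert!(can_write_existing(sealed));
    assert!(!can_add_new(sealed));
    assert!(is_sealed(sealed) && !is_extensible(sealed));
}
```
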
diff --git a/checker/src/context/invocation.rs b/checker/src/context/invocation.rs
index 4435e4a8..c20fa8da 100644
--- a/checker/src/context/invocation.rs
+++ b/checker/src/context/invocation.rs
@@ -89,9 +89,9 @@ impl CallCheckingBehavior for InvocationContext {
self.0
.iter_mut()
.rev()
- .find_map(|kind| {
+ .find_map(|kind| -> Option<&mut LocalInformation> {
if let InvocationKind::Conditional(info) = kind {
- Some(&mut **info)
+ Some(&mut *info)
} else {
None
}
diff --git a/checker/src/context/mod.rs b/checker/src/context/mod.rs
index e39c2482..8749cdbe 100644
--- a/checker/src/context/mod.rs
+++ b/checker/src/context/mod.rs
@@ -7,6 +7,7 @@ pub mod information;
pub mod invocation;
mod root;
+use information::ObjectProtectionState;
pub(crate) use invocation::CallCheckingBehavior;
pub use root::RootContext;
@@ -518,7 +519,7 @@ impl Context {
}
/// TODO doesn't look at aliases using `get_type_fact`!
- pub fn is_frozen(&self, value: TypeId) -> Option<TypeId> {
+ pub fn get_object_protection(&self, value: TypeId) -> Option<ObjectProtectionState> {
self.parents_iter().find_map(|ctx| get_on_ctx!(ctx.info.frozen.get(&value))).copied()
}
@@ -526,9 +527,9 @@ impl Context {
// TODO should check the TypeId::is_primitive... via aliases + open_poly
pub(crate) fn _is_immutable(&self, _value: TypeId) -> bool {
todo!()
- // let is_frozen = self.is_frozen(value);
+ // let get_object_protection = self.get_object_protection(value);
- // if is_frozen == Some(TypeId::TRUE) {
+ // if get_object_protection == Some(TypeId::TRUE) {
// true
// } else if let Some(
// Constant::Boolean(..)
diff --git a/checker/src/diagnostics.rs b/checker/src/diagnostics.rs
index 149b4c62..2d1afed0 100644
--- a/checker/src/diagnostics.rs
+++ b/checker/src/diagnostics.rs
@@ -114,18 +114,18 @@ pub struct DiagnosticsContainer {
diagnostics: Vec<Diagnostic>,
// Quick way to check whether a error was added
#[cfg_attr(feature = "serde-serialize", serde(skip_serializing))]
- has_error: bool,
+ contains_error: bool,
}
// TODO the add methods are the same...
impl DiagnosticsContainer {
#[must_use]
pub fn new() -> Self {
- Self { diagnostics: Default::default(), has_error: false }
+ Self { diagnostics: Default::default(), contains_error: false }
}
pub fn add_error<T: Into<Diagnostic>>(&mut self, error: T) {
- self.has_error = true;
+ self.contains_error = true;
self.diagnostics.push(error.into());
}
@@ -138,8 +138,8 @@ impl DiagnosticsContainer {
}
#[must_use]
- pub fn has_error(&self) -> bool {
- self.has_error
+ pub fn contains_error(&self) -> bool {
+ self.contains_error
}
pub fn sources(&self) -> impl Iterator<Item = SourceId> + '_ {
@@ -153,7 +153,7 @@ impl DiagnosticsContainer {
}
pub fn into_result(self) -> Result<Self, Self> {
- if self.has_error {
+ if self.contains_error {
Err(self)
} else {
Ok(self)
diff --git a/checker/src/features/constant_functions.rs b/checker/src/features/constant_functions.rs
index 9c359f1b..ecf8497b 100644
--- a/checker/src/features/constant_functions.rs
+++ b/checker/src/features/constant_functions.rs
@@ -2,7 +2,11 @@ use iterator_endiate::EndiateIteratorExt;
use source_map::SpanWithSource;
use crate::{
- context::{get_on_ctx, information::InformationChain, invocation::CheckThings},
+ context::{
+ get_on_ctx,
+ information::{InformationChain, ObjectProtectionState},
+ invocation::CheckThings,
+ },
events::printing::debug_effects,
features::objects::{ObjectBuilder, Proxy},
types::{
@@ -321,7 +325,27 @@ pub(crate) fn call_constant_function(
if let Some(on) =
(arguments.len() == 1).then(|| arguments[0].non_spread_type().ok()).flatten()
{
- environment.info.frozen.insert(on);
+ environment.info.frozen.insert(on, ObjectProtectionState::Frozen);
+ Ok(ConstantOutput::Value(on))
+ } else {
+ Err(ConstantFunctionError::CannotComputeConstant)
+ }
+ }
+ "seal" => {
+ if let Some(on) =
+ (arguments.len() == 1).then(|| arguments[0].non_spread_type().ok()).flatten()
+ {
+ environment.info.frozen.insert(on, ObjectProtectionState::Sealed);
+ Ok(ConstantOutput::Value(on))
+ } else {
+ Err(ConstantFunctionError::CannotComputeConstant)
+ }
+ }
+ "preventExtensions" => {
+ if let Some(on) =
+ (arguments.len() == 1).then(|| arguments[0].non_spread_type().ok()).flatten()
+ {
+ environment.info.frozen.insert(on, ObjectProtectionState::NoExtensions);
Ok(ConstantOutput::Value(on))
} else {
Err(ConstantFunctionError::CannotComputeConstant)
@@ -331,9 +355,50 @@ pub(crate) fn call_constant_function(
if let Some(on) =
(arguments.len() == 1).then(|| arguments[0].non_spread_type().ok()).flatten()
{
- let is_frozen =
- environment.get_chain_of_info().any(|info| info.frozen.contains(&on));
- Ok(ConstantOutput::Value(if is_frozen { TypeId::TRUE } else { TypeId::FALSE }))
+ let object_protection = environment.get_object_protection(on);
+ let result = if matches!(object_protection, Some(ObjectProtectionState::Frozen)) {
+ TypeId::TRUE
+ } else {
+ // TODO test properties here
+ TypeId::FALSE
+ };
+ Ok(ConstantOutput::Value(result))
+ } else {
+ Err(ConstantFunctionError::CannotComputeConstant)
+ }
+ }
+ "isSealed" => {
+ if let Some(on) =
+ (arguments.len() == 1).then(|| arguments[0].non_spread_type().ok()).flatten()
+ {
+ let object_protection = environment.get_object_protection(on);
+ let result = if matches!(
+ object_protection,
+ Some(ObjectProtectionState::Frozen | ObjectProtectionState::Sealed)
+ ) {
+ TypeId::TRUE
+ } else {
+ // TODO test properties here
+ TypeId::FALSE
+ };
+ Ok(ConstantOutput::Value(result))
+ } else {
+ Err(ConstantFunctionError::CannotComputeConstant)
+ }
+ }
+ "isExtensible" => {
+ if let Some(on) =
+ (arguments.len() == 1).then(|| arguments[0].non_spread_type().ok()).flatten()
+ {
+ // Note this method returns an inverse result
+ let object_protection = environment.get_object_protection(on);
+ let result = if object_protection.is_some() {
+ TypeId::FALSE
+ } else {
+ TypeId::TRUE
+ // TODO test properties here
+ };
+ Ok(ConstantOutput::Value(result))
} else {
Err(ConstantFunctionError::CannotComputeConstant)
}
diff --git a/checker/src/lib.rs b/checker/src/lib.rs
index 39e60306..d7ee7802 100644
--- a/checker/src/lib.rs
+++ b/checker/src/lib.rs
@@ -459,13 +459,13 @@ impl CheckOutput {
pub fn check_project<T: crate::ReadFromFS, A: crate::ASTImplementation>(
entry_points: Vec<PathBuf>,
type_definition_files: Vec<PathBuf>,
- resolver: T,
+ resolver: &T,
options: TypeCheckOptions,
parser_requirements: A::ParserRequirements,
existing_files: Option<MapFileStore<WithPathMap>>,
) -> CheckOutput<A> {
let mut checking_data =
- CheckingData::<T, A>::new(options, &resolver, existing_files, parser_requirements);
+ CheckingData::<T, A>::new(options, resolver, existing_files, parser_requirements);
let mut root = crate::context::RootContext::new_with_primitive_references();
@@ -478,7 +478,7 @@ pub fn check_project(
add_definition_files_to_root(type_definition_files, &mut root, &mut checking_data);
crate::utilities::unpause_debug_mode();
- if checking_data.diagnostics_container.has_error() {
+ if checking_data.diagnostics_container.contains_error() {
return CheckOutput {
types: checking_data.types,
module_contents: checking_data.modules.files,
@@ -753,7 +753,7 @@ pub fn generate_cache(
add_definition_files_to_root(vec![on.to_path_buf()], &mut root, &mut checking_data);
assert!(
- !checking_data.diagnostics_container.has_error(),
+ !checking_data.diagnostics_container.contains_error(),
"found error in definition file {:#?}",
checking_data.diagnostics_container.get_diagnostics()
);
diff --git a/checker/src/options.rs b/checker/src/options.rs
index 84069fd7..5592f4a4 100644
--- a/checker/src/options.rs
+++ b/checker/src/options.rs
@@ -3,6 +3,7 @@
#[cfg_attr(feature = "serde-serialize", derive(serde::Deserialize), serde(default))]
#[cfg_attr(target_family = "wasm", derive(tsify::Tsify))]
#[allow(clippy::struct_excessive_bools)]
+#[derive(Clone)]
pub struct TypeCheckOptions {
/// Parameters cannot be reassigned
pub constant_parameters: bool,
diff --git a/checker/src/synthesis/functions.rs b/checker/src/synthesis/functions.rs
index c5c450b6..ab5dbf65 100644
--- a/checker/src/synthesis/functions.rs
+++ b/checker/src/synthesis/functions.rs
@@ -636,86 +636,104 @@ pub(super) fn synthesise_shape(
environment: &mut Environment,
checking_data: &mut CheckingData,
) -> crate::features::functions::PartialFunction {
- let type_parameters = function.type_parameters.as_ref().map(|type_parameters| {
- super::functions::synthesise_type_parameters(type_parameters, environment, checking_data)
- });
-
- let parameters = function
- .parameters
- .parameters
- .iter()
- .map(|parameter| {
- let parameter_constraint =
- parameter.type_annotation.as_ref().map_or(TypeId::ANY_TYPE, |ta| {
- synthesise_type_annotation(ta, environment, checking_data)
+ environment
+ .new_lexical_environment_fold_into_parent(
+ Scope::FunctionAnnotation {},
+ checking_data,
+ |environment, checking_data| {
+ let type_parameters = function.type_parameters.as_ref().map(|type_parameters| {
+ super::functions::synthesise_type_parameters(
+ type_parameters,
+ environment,
+ checking_data,
+ )
});
- // TODO I think this is correct
- let is_optional = parameter.additionally.is_some();
- let ty = if is_optional {
- checking_data.types.new_or_type(parameter_constraint, TypeId::UNDEFINED_TYPE)
- } else {
- parameter_constraint
- };
-
- SynthesisedParameter {
- name: variable_field_to_string(parameter.name.get_ast_ref()),
- is_optional,
- ty,
- position: parameter.position.with_source(environment.get_source()),
- }
- })
- .collect();
-
- let rest_parameter = function.parameters.rest_parameter.as_ref().map(|rest_parameter| {
- let parameter_constraint =
- rest_parameter.type_annotation.as_ref().map_or(TypeId::ANY_TYPE, |annotation| {
- synthesise_type_annotation(annotation, environment, checking_data)
- });
-
- let item_type = if let TypeId::ERROR_TYPE = parameter_constraint {
- TypeId::ERROR_TYPE
- } else if let Type::PartiallyAppliedGenerics(PartiallyAppliedGenerics {
- on: TypeId::ARRAY_TYPE,
- arguments,
- }) = checking_data.types.get_type_by_id(parameter_constraint)
- {
- if let Some(item) = arguments.get_structure_restriction(TypeId::T_TYPE) {
- item
- } else {
- unreachable!()
- }
- } else {
- crate::utilities::notify!("rest parameter should be array error");
- // checking_data.diagnostics_container.add_error(
- // TypeCheckError::RestParameterAnnotationShouldBeArrayType(rest_parameter.get),
- // );
- TypeId::ERROR_TYPE
- };
-
- let name = variable_field_to_string(&rest_parameter.name);
+ let parameters = function
+ .parameters
+ .parameters
+ .iter()
+ .map(|parameter| {
+ let parameter_constraint =
+ parameter.type_annotation.as_ref().map_or(TypeId::ANY_TYPE, |ta| {
+ synthesise_type_annotation(ta, environment, checking_data)
+ });
+
+ // TODO I think this is correct
+ let is_optional = parameter.additionally.is_some();
+ let ty = if is_optional {
+ checking_data
+ .types
+ .new_or_type(parameter_constraint, TypeId::UNDEFINED_TYPE)
+ } else {
+ parameter_constraint
+ };
+
+ SynthesisedParameter {
+ name: variable_field_to_string(parameter.name.get_ast_ref()),
+ is_optional,
+ ty,
+ position: parameter.position.with_source(environment.get_source()),
+ }
+ })
+ .collect();
+
+ let rest_parameter =
+ function.parameters.rest_parameter.as_ref().map(|rest_parameter| {
+ let parameter_constraint = rest_parameter.type_annotation.as_ref().map_or(
+ TypeId::ANY_TYPE,
+ |annotation| {
+ synthesise_type_annotation(annotation, environment, checking_data)
+ },
+ );
+
+ let item_type = if let TypeId::ERROR_TYPE = parameter_constraint {
+ TypeId::ERROR_TYPE
+ } else if let Type::PartiallyAppliedGenerics(PartiallyAppliedGenerics {
+ on: TypeId::ARRAY_TYPE,
+ arguments,
+ }) = checking_data.types.get_type_by_id(parameter_constraint)
+ {
+ if let Some(item) = arguments.get_structure_restriction(TypeId::T_TYPE)
+ {
+ item
+ } else {
+ unreachable!()
+ }
+ } else {
+ crate::utilities::notify!("rest parameter should be array error");
+ // checking_data.diagnostics_container.add_error(
+ // TypeCheckError::RestParameterAnnotationShouldBeArrayType(rest_parameter.get),
+ // );
+ TypeId::ERROR_TYPE
+ };
+
+ let name = variable_field_to_string(&rest_parameter.name);
+
+ SynthesisedRestParameter {
+ item_type,
+ // This will be overridden when actual synthesis
+ ty: parameter_constraint,
+ name,
+ position: rest_parameter.position.with_source(environment.get_source()),
+ }
+ });
- SynthesisedRestParameter {
- item_type,
- // This will be overridden when actual synthesis
- ty: parameter_constraint,
- name,
- position: rest_parameter.position.with_source(environment.get_source()),
- }
- });
+ let return_type = function.return_type.as_ref().map(|annotation| {
+ ReturnType(
+ synthesise_type_annotation(annotation, environment, checking_data),
+ annotation.get_position().with_source(environment.get_source()),
+ )
+ });
- let return_type = function.return_type.as_ref().map(|annotation| {
- ReturnType(
- synthesise_type_annotation(annotation, environment, checking_data),
- annotation.get_position().with_source(environment.get_source()),
+ crate::features::functions::PartialFunction(
+ type_parameters,
+ SynthesisedParameters { parameters, rest_parameter },
+ return_type,
+ )
+ },
)
- });
-
- crate::features::functions::PartialFunction(
- type_parameters,
- SynthesisedParameters { parameters, rest_parameter },
- return_type,
- )
+ .0
}
/// TODO WIP
diff --git a/checker/src/synthesis/interactive.rs b/checker/src/synthesis/interactive.rs
new file mode 100644
index 00000000..54f8a620
--- /dev/null
+++ b/checker/src/synthesis/interactive.rs
@@ -0,0 +1,90 @@
+/// For the REPL in Ezno's CLI
+use std::{mem, path::PathBuf};
+
+use source_map::{FileSystem, MapFileStore, SourceId, WithPathMap};
+
+use crate::{
+ add_definition_files_to_root, types::printing::print_type, CheckingData, DiagnosticsContainer,
+ RootContext, TypeId,
+};
+
+use super::{block::synthesise_block, expressions::synthesise_multiple_expression};
+
+pub struct State<'a, T: crate::ReadFromFS> {
+ checking_data: CheckingData<'a, T, super::EznoParser>,
+ root: RootContext,
+ source: SourceId,
+}
+
+impl<'a, T: crate::ReadFromFS> State<'a, T> {
+ pub fn new(
+ resolver: &'a T,
+ type_definition_files: Vec<PathBuf>,
+ ) -> Result<Self, (DiagnosticsContainer, MapFileStore<WithPathMap>)> {
+ let mut root = RootContext::new_with_primitive_references();
+ let mut checking_data =
+ CheckingData::new(Default::default(), resolver, Default::default(), ());
+
+ add_definition_files_to_root(type_definition_files, &mut root, &mut checking_data);
+
+ if checking_data.diagnostics_container.contains_error() {
+ Err((checking_data.diagnostics_container, checking_data.modules.files))
+ } else {
+ let source =
+ checking_data.modules.files.new_source_id("CLI.tsx".into(), String::default());
+ Ok(Self { checking_data, root, source })
+ }
+ }
+
+ pub fn check_item(
+ &mut self,
+ item: &parser::Module,
+ ) -> Result<(Option<String>, DiagnosticsContainer), DiagnosticsContainer> {
+ let (ty, ..) = self.root.new_lexical_environment_fold_into_parent(
+ crate::Scope::PassThrough { source: self.source },
+ &mut self.checking_data,
+ |environment, checking_data| {
+ if let Some(parser::StatementOrDeclaration::Statement(
+ parser::Statement::Expression(expression),
+ )) = item.items.last()
+ {
+ synthesise_block(
+ &item.items[..(item.items.len() - 1)],
+ environment,
+ checking_data,
+ );
+ let result = synthesise_multiple_expression(
+ expression,
+ environment,
+ checking_data,
+ TypeId::ANY_TYPE,
+ );
+ Some(print_type(result, &checking_data.types, environment, false))
+ } else {
+ synthesise_block(&item.items, environment, checking_data);
+ None
+ }
+ },
+ );
+ let dc = mem::take(&mut self.checking_data.diagnostics_container);
+ if dc.contains_error() {
+ Err(dc)
+ } else {
+ Ok((ty, dc))
+ }
+ }
+
+ #[must_use]
+ pub fn get_source_id(&self) -> SourceId {
+ self.source
+ }
+
+ #[must_use]
+ pub fn get_fs_ref(&self) -> &MapFileStore<WithPathMap> {
+ &self.checking_data.modules.files
+ }
+
+ pub fn get_fs_mut(&mut self) -> &mut MapFileStore<WithPathMap> {
+ &mut self.checking_data.modules.files
+ }
+}
diff --git a/checker/src/synthesis/mod.rs b/checker/src/synthesis/mod.rs
index 59da5c06..e9a2ecf3 100644
--- a/checker/src/synthesis/mod.rs
+++ b/checker/src/synthesis/mod.rs
@@ -12,6 +12,7 @@ pub mod expressions;
mod extensions;
pub mod functions;
pub mod hoisting;
+pub mod interactive;
pub mod interfaces;
pub mod statements;
pub mod type_annotations;
@@ -317,96 +318,3 @@ impl StatementOrExpressionVariable for ExpressionPosition {
None
}
}
-
-/// For the REPL in Ezno's CLI
-pub mod interactive {
- use std::{mem, path::PathBuf};
-
- use source_map::{FileSystem, MapFileStore, SourceId, WithPathMap};
-
- use crate::{
- add_definition_files_to_root, types::printing::print_type, CheckingData,
- DiagnosticsContainer, RootContext, TypeId,
- };
-
- use super::{block::synthesise_block, expressions::synthesise_multiple_expression};
-
- pub struct State<'a, T: crate::ReadFromFS> {
- checking_data: CheckingData<'a, T, super::EznoParser>,
- root: RootContext,
- source: SourceId,
- }
-
- impl<'a, T: crate::ReadFromFS> State<'a, T> {
- pub fn new(
- resolver: &'a T,
- type_definition_files: Vec<PathBuf>,
- ) -> Result<Self, (DiagnosticsContainer, MapFileStore<WithPathMap>)> {
- let mut root = RootContext::new_with_primitive_references();
- let mut checking_data =
- CheckingData::new(Default::default(), resolver, Default::default(), ());
-
- add_definition_files_to_root(type_definition_files, &mut root, &mut checking_data);
-
- if checking_data.diagnostics_container.has_error() {
- Err((checking_data.diagnostics_container, checking_data.modules.files))
- } else {
- let source =
- checking_data.modules.files.new_source_id("CLI.tsx".into(), String::default());
- Ok(Self { checking_data, root, source })
- }
- }
-
- pub fn check_item(
- &mut self,
- item: &parser::Module,
- ) -> Result<(Option<String>, DiagnosticsContainer), DiagnosticsContainer> {
- let (ty, ..) = self.root.new_lexical_environment_fold_into_parent(
- crate::Scope::PassThrough { source: self.source },
- &mut self.checking_data,
- |environment, checking_data| {
- if let Some(parser::StatementOrDeclaration::Statement(
- parser::Statement::Expression(expression),
- )) = item.items.last()
- {
- synthesise_block(
- &item.items[..(item.items.len() - 1)],
- environment,
- checking_data,
- );
- let result = synthesise_multiple_expression(
- expression,
- environment,
- checking_data,
- TypeId::ANY_TYPE,
- );
- Some(print_type(result, &checking_data.types, environment, false))
- } else {
- synthesise_block(&item.items, environment, checking_data);
- None
- }
- },
- );
- let dc = mem::take(&mut self.checking_data.diagnostics_container);
- if dc.has_error() {
- Err(dc)
- } else {
- Ok((ty, dc))
- }
- }
-
- #[must_use]
- pub fn get_source_id(&self) -> SourceId {
- self.source
- }
-
- #[must_use]
- pub fn get_fs_ref(&self) -> &MapFileStore<WithPathMap> {
- &self.checking_data.modules.files
- }
-
- pub fn get_fs_mut(&mut self) -> &mut MapFileStore<WithPathMap> {
- &mut self.checking_data.modules.files
- }
- }
-}
diff --git a/checker/src/types/printing.rs b/checker/src/types/printing.rs
index dc19a2f9..2d012a91 100644
--- a/checker/src/types/printing.rs
+++ b/checker/src/types/printing.rs
@@ -58,6 +58,24 @@ pub fn print_type_with_type_arguments(
buf
}
+pub fn print_inner_template_literal_type_into_buf<C: InformationChain>(
+ ty: TypeId,
+ buf: &mut String,
+ cycles: &mut HashSet<TypeId>,
+ args: GenericChain,
+ types: &TypeStore,
+ info: &C,
+ debug: bool,
+) {
+ if let Type::Constant(cst) = types.get_type_by_id(ty) {
+ buf.push_str(&cst.as_js_string());
+ } else {
+ buf.push_str("${");
+ print_type_into_buf(ty, buf, cycles, args, types, info, debug);
+ buf.push('}');
+ }
+}
+
/// Recursion safe + reuses buffer
pub fn print_type_into_buf(
ty: TypeId,
@@ -489,7 +507,25 @@ pub fn print_type_into_buf(
unreachable!()
}
},
- _constructor => {
+ constructor => {
+ if let Constructor::BinaryOperator { result: result_ty, lhs, rhs, .. } = constructor
+ {
+ if *result_ty != TypeId::NUMBER_TYPE
+ && !matches!(
+ types.get_type_by_id(*result_ty),
+ Type::PartiallyAppliedGenerics(_) | Type::RootPolyType(_)
+ ) {
+ buf.push('`');
+ print_inner_template_literal_type_into_buf(
+ *lhs, buf, cycles, args, types, info, debug,
+ );
+ print_inner_template_literal_type_into_buf(
+ *rhs, buf, cycles, args, types, info, debug,
+ );
+ buf.push('`');
+ return;
+ }
+ }
let base = get_constraint(ty, types).unwrap();
print_type_into_buf(base, buf, cycles, args, types, info, debug);
}
diff --git a/checker/src/types/properties/assignment.rs b/checker/src/types/properties/assignment.rs
index ff545848..ee573cbd 100644
--- a/checker/src/types/properties/assignment.rs
+++ b/checker/src/types/properties/assignment.rs
@@ -1,7 +1,7 @@
use super::{get_property_unbound, Descriptor, PropertyKey, PropertyValue, Publicity};
use crate::{
- context::CallCheckingBehavior,
+ context::{information::ObjectProtectionState, CallCheckingBehavior},
diagnostics::{PropertyKeyRepresentation, TypeStringRepresentation},
events::Event,
features::objects::Proxy,
@@ -60,8 +60,11 @@ pub fn set_property(
types: &mut TypeStore,
) -> SetPropertyResult {
// Frozen checks
+ let object_protection = environment.get_object_protection(on);
+
{
- if environment.info.frozen.contains(&on) {
+ if let Some(ObjectProtectionState::Frozen) = object_protection {
+ // FUTURE this could have a separate error?
return Err(SetPropertyError::NotWriteable {
property: PropertyKeyRepresentation::new(under, environment, types),
position,
@@ -269,13 +272,15 @@ pub fn set_property(
),
}
} else {
- let on_type = types.get_type_by_id(on);
- crate::utilities::notify!("{:?}", on_type);
- if get_constraint(on, types).is_some() {
- return Err(SetPropertyError::AssigningToNonExistent {
- property: PropertyKeyRepresentation::new(under, environment, types),
- position,
- });
+ // Sealed & no extensions check for NEW property (frozen case covered above)
+ {
+ if object_protection.is_some() {
+ // FUTURE this could have a separate error?
+ return Err(SetPropertyError::NotWriteable {
+ property: PropertyKeyRepresentation::new(under, environment, types),
+ position,
+ });
+ }
}
crate::utilities::notify!("No property on object, assigning anyway");
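With this change `set_property` consults a single object-protection state rather than a bare `frozen` set: a frozen object rejects every write, while any other protection (the `object_protection.is_some()` branch, i.e. sealed or non-extensible) only rejects writes that would create a new property. The `contains_key` change in subtyping.rs below follows from the same migration of `frozen` from a set to a keyed store. A standalone sketch of the decision table; only `Frozen` appears explicitly in the diff, so the other variant names are assumptions:

    enum ObjectProtectionState {
        Frozen,       // Object.freeze: no writes at all
        Sealed,       // Object.seal: existing properties stay writable, none added
        NoExtensions, // Object.preventExtensions: no new properties
    }

    fn write_allowed(
        protection: Option<ObjectProtectionState>,
        property_already_exists: bool,
    ) -> bool {
        match protection {
            // Frozen rejects the write unconditionally (checked first above)
            Some(ObjectProtectionState::Frozen) => false,
            // Sealed / no-extensions only reject brand new properties
            Some(_) => property_already_exists,
            None => true,
        }
    }

    fn main() {
        assert!(!write_allowed(Some(ObjectProtectionState::Frozen), true));
        assert!(write_allowed(Some(ObjectProtectionState::Sealed), true));
        assert!(!write_allowed(Some(ObjectProtectionState::NoExtensions), false));
        assert!(write_allowed(None, false));
    }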
diff --git a/checker/src/types/subtyping.rs b/checker/src/types/subtyping.rs
index 037601bf..7af1ac9a 100644
--- a/checker/src/types/subtyping.rs
+++ b/checker/src/types/subtyping.rs
@@ -716,7 +716,9 @@ pub(crate) fn type_is_subtype_with_generics(
information,
types,
)
- } else if information.get_chain_of_info().any(|info| info.frozen.contains(&ty))
+ } else if information
+ .get_chain_of_info()
+ .any(|info| info.frozen.contains_key(&ty))
|| matches!(subtype, Type::Constant(_))
|| matches!(
ty,
diff --git a/checker/tests/partial_source.rs b/checker/tests/partial_source.rs
index 867b2f96..3b74f56b 100644
--- a/checker/tests/partial_source.rs
+++ b/checker/tests/partial_source.rs
@@ -21,7 +21,7 @@ fn partial_checking() {
let result = check_project::<_, synthesis::EznoParser>(
vec![root.into()],
type_definition_files,
- |_path: &std::path::Path| Some(text.to_owned()),
+ &|_path: &std::path::Path| Some(text.to_owned()),
options,
(),
None,
diff --git a/checker/tests/suggestions.rs b/checker/tests/suggestions.rs
index 4a9f3e41..301b4a52 100644
--- a/checker/tests/suggestions.rs
+++ b/checker/tests/suggestions.rs
@@ -41,7 +41,7 @@ console.log(obj2.proberly);
let result = check_project::<_, ezno_checker::synthesis::EznoParser>(
vec![root.into()],
type_definition_files,
- resolver,
+ &resolver,
options,
(),
None,
diff --git a/checker/tests/type_mappings.rs b/checker/tests/type_mappings.rs
index e6eef489..bccb65c3 100644
--- a/checker/tests/type_mappings.rs
+++ b/checker/tests/type_mappings.rs
@@ -17,7 +17,7 @@ y()";
let result = check_project::<_, synthesis::EznoParser>(
vec![root.into()],
type_definition_files,
- |_path: &std::path::Path| Some(text.to_owned()),
+ &|_path: &std::path::Path| Some(text.to_owned()),
options,
(),
None,
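The three test files above now pass the file resolver by reference (`&resolver`, `&|_path| ...`), which suggests `check_project` was changed to borrow the resolver rather than take it by value. A toy illustration of why only the call sites change (the real `check_project` signature is not shown in this diff, so the function below is a stand-in):

    use std::path::Path;

    // A function that, like the new API presumably does, borrows its resolver.
    fn check_project_like(resolver: &impl Fn(&Path) -> Option<String>) -> Option<String> {
        resolver(Path::new("main.tsx"))
    }

    fn main() {
        let text = "const x: number = 1".to_owned();
        let resolver = |_path: &Path| Some(text.to_owned());
        // Passed by reference, mirroring `&resolver` / `&|_path| ...` in the tests
        println!("{:?}", check_project_like(&resolver));
    }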
diff --git a/parser/examples/simple.rs b/parser/examples/simple.rs
new file mode 100644
index 00000000..46475a22
--- /dev/null
+++ b/parser/examples/simple.rs
@@ -0,0 +1,9 @@
+#[allow(unused)]
+use ezno_parser::{ASTNode, Expression, Module};
+
+fn main() {
+ let source = "'Hello World!'".to_owned();
+ let parse_options = Default::default();
+ let result = Expression::from_string_with_options(source.clone(), parse_options, Some(40));
+ eprintln!("{result:#?}");
+}
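Assuming the standard Cargo example layout, this new example should be runnable with something like `cargo run -p ezno-parser --example simple`; the `Some(40)` argument appears to supply a byte offset for the reported spans, which ties in with the module span change further down in this diff.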
diff --git a/parser/src/block.rs b/parser/src/block.rs
index 79739b59..e37b0f20 100644
--- a/parser/src/block.rs
+++ b/parser/src/block.rs
@@ -393,10 +393,11 @@ pub(crate) fn parse_statements_and_declarations(
expect_semi_colon(reader, &state.line_starts, end, options)?
} else if options.retain_blank_lines {
let Token(kind, next) = reader.peek().ok_or_else(crate::parse_lexing_error)?;
- let lines = state.line_starts.byte_indexes_crosses_lines(end as usize, next.0 as usize);
if let TSXToken::EOS = kind {
- lines
+ 1
} else {
+ let lines =
+ state.line_starts.byte_indexes_crosses_lines(end as usize, next.0 as usize);
lines.saturating_sub(1)
}
} else {
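The reordering above changes what `retain_blank_lines` records at the end of the source: instead of counting every line between the last statement and the end-of-source token, exactly one blank line is kept, and the line-crossing count is only computed for gaps between statements. The intent, reduced to a standalone function (`lines_crossed` stands in for `line_starts.byte_indexes_crosses_lines(end, next)`):

    fn retained_blank_lines(next_token_is_eos: bool, lines_crossed: usize) -> usize {
        if next_token_is_eos {
            // At end of source keep a single trailing blank line
            1
        } else {
            // Between statements, drop the line the previous statement ends on
            lines_crossed.saturating_sub(1)
        }
    }

    fn main() {
        assert_eq!(retained_blank_lines(true, 5), 1);
        assert_eq!(retained_blank_lines(false, 3), 2);
    }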
diff --git a/parser/src/lib.rs b/parser/src/lib.rs
index 7944ac01..93633225 100644
--- a/parser/src/lib.rs
+++ b/parser/src/lib.rs
@@ -266,10 +266,10 @@ pub(crate) fn throw_unexpected_token_with_token(
#[derive(Debug)]
pub struct ParsingState {
- pub(crate) line_starts: source_map::LineStarts,
- pub(crate) length_of_source: u32,
+ pub line_starts: source_map::LineStarts,
+ pub length_of_source: u32,
/// TODO as multithreaded channel + record is dynamic exists
- pub(crate) constant_imports: Vec,
+ pub constant_imports: Vec,
pub keyword_positions: Option,
pub partial_points: Vec,
}
diff --git a/parser/src/modules.rs b/parser/src/modules.rs
index f284b658..777e575f 100644
--- a/parser/src/modules.rs
+++ b/parser/src/modules.rs
@@ -44,7 +44,8 @@ impl ASTNode for Module {
state: &mut crate::ParsingState,
options: &ParseOptions,
) -> ParseResult<Self> {
- let span = Span { start: 0, source: (), end: state.length_of_source };
+ let start = reader.peek().map(|t| t.1 .0).unwrap_or_default();
+ let span = Span { start, source: (), end: start + state.length_of_source };
let hashbang_comment = if let Some(crate::Token(TSXToken::HashBangComment(_), _)) =
reader.peek()
{
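After this change a `Module`'s span begins at the position of its first token instead of byte 0, so leading blank lines (or an offset supplied at parse time, as in the new `parser/examples/simple.rs`) shift the reported span accordingly. A rough way to observe it, assuming the `ASTNode` trait exposes `get_position` and that `ParseOptions` implements `Default` as the rest of this diff suggests:

    use ezno_parser::{ASTNode, Module, ParseOptions};

    fn main() {
        // Two leading blank lines: the first token starts at byte 2
        let source = "\n\nconst x = 2".to_owned();
        let module = Module::from_string(source, ParseOptions::default()).unwrap();
        // Expect the span to start at the `const` token rather than at 0
        eprintln!("{:?}", module.get_position());
    }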
diff --git a/src/ast_explorer.rs b/src/ast_explorer.rs
index 7879d421..d8fd7b8a 100644
--- a/src/ast_explorer.rs
+++ b/src/ast_explorer.rs
@@ -1,16 +1,12 @@
#![allow(dead_code)]
-use std::{fs, path::PathBuf};
+use std::path::PathBuf;
use argh::FromArgs;
-use console::style;
use enum_variants_strings::EnumVariantsStrings;
use parser::{source_map::FileSystem, ASTNode, Expression, Module, ToStringOptions};
-use crate::{
- reporting::report_diagnostics_to_cli,
- utilities::{print_to_cli, print_to_cli_without_newline},
-};
+use crate::{reporting::report_diagnostics_to_cli, utilities::print_to_cli};
/// REPL for printing out AST from user input
#[derive(FromArgs, Debug)]
@@ -24,23 +20,25 @@ pub(crate) struct ExplorerArguments {
}
impl ExplorerArguments {
+ #[cfg(target_family = "wasm")]
+ pub(crate) fn run<T: crate::ReadFromFS>(&mut self, _fs_resolver: &T) {
+ panic!("Cannot run ast-explorer in WASM because of input callback. Consider reimplementing using library");
+ }
+
#[allow(clippy::needless_continue)]
- pub(crate) fn run(
- &mut self,
- fs_resolver: &T,
- cli_input_resolver: U,
- ) {
+ #[cfg(not(target_family = "wasm"))]
+ pub(crate) fn run<T: crate::ReadFromFS>(&mut self, fs_resolver: &T) {
if let Some(ref file) = self.file {
- let content = fs_resolver.get_content_at_path(file);
+ let content = fs_resolver.read_file(file);
if let Some(content) = content {
- self.nested.run(content, Some(file.to_owned()));
+ self.nested.run(String::from_utf8(content).unwrap(), Some(file.to_owned()));
} else {
eprintln!("Could not find file at {}", file.display());
}
} else {
print_to_cli(format_args!("ezno ast-explorer\nUse #exit to leave. Also #switch-mode *mode name* and #load-file *path*"));
loop {
- let input = cli_input_resolver(self.nested.to_str()).unwrap_or_default();
+ let input = crate::utilities::cli_input_resolver(self.nested.to_str());
if input.is_empty() {
continue;
@@ -55,7 +53,7 @@ impl ExplorerArguments {
}
};
} else if let Some(path) = input.strip_prefix("#load-file ") {
- let input = match fs::read_to_string(path.trim()) {
+ let input = match std::fs::read_to_string(path.trim()) {
Ok(string) => string,
Err(err) => {
print_to_cli(format_args!("{err:?}"));
@@ -80,7 +78,6 @@ pub(crate) enum ExplorerSubCommand {
FullAST(FullASTArgs),
Prettifier(PrettyArgs),
Uglifier(UglifierArgs),
- Lexer(LexerArgs),
}
/// Prints AST for a given expression
@@ -99,6 +96,9 @@ pub(crate) struct FullASTArgs {
/// print results as json
#[argh(switch)]
json: bool,
+ /// just print whether parse was successful
+ #[argh(switch)]
+ check: bool,
}
/// Prettifies source code (full whitespace)
@@ -111,11 +111,6 @@ pub(crate) struct PrettyArgs {}
#[argh(subcommand, name = "uglifier")]
pub(crate) struct UglifierArgs {}
-/// Prints sources with tokens over
-#[derive(FromArgs, Debug, Default)]
-#[argh(subcommand, name = "lexer")]
-pub(crate) struct LexerArgs {}
-
impl ExplorerSubCommand {
pub fn run(&self, input: String, path: Option<PathBuf>) {
match self {
@@ -148,11 +143,20 @@ impl ExplorerSubCommand {
ExplorerSubCommand::FullAST(cfg) => {
let mut fs =
parser::source_map::MapFileStore::::default();
- let source_id = fs.new_source_id(path.unwrap_or_default(), input.clone());
+ let source_id = fs.new_source_id(path.clone().unwrap_or_default(), input.clone());
let res = Module::from_string(input, parser::ParseOptions::all_features());
match res {
Ok(res) => {
- if cfg.json {
+ if cfg.check {
+ if let Some(ref path) = path {
+ print_to_cli(format_args!(
+ "{path} parsed successfully",
+ path = path.display()
+ ));
+ } else {
+ print_to_cli(format_args!("Parsed successfully",));
+ }
+ } else if cfg.json {
print_to_cli(format_args!(
"{}",
serde_json::to_string_pretty(&res).unwrap()
@@ -194,28 +198,6 @@ impl ExplorerSubCommand {
.unwrap(),
}
}
- ExplorerSubCommand::Lexer(_) => {
- let mut color = console::Color::Red;
- for (section, with) in parser::script_to_tokens(input) {
- if with {
- let value = style(section).bg(color);
- // Cycle through colors
- color = match color {
- console::Color::Red => console::Color::Green,
- console::Color::Green => console::Color::Yellow,
- console::Color::Yellow => console::Color::Blue,
- console::Color::Blue => console::Color::Magenta,
- console::Color::Magenta => console::Color::Cyan,
- console::Color::Cyan => console::Color::Red,
- _ => unreachable!(),
- };
- print_to_cli_without_newline(format_args!("{value}"));
- } else {
- print_to_cli_without_newline(format_args!("{section}"));
- }
- }
- print_to_cli(format_args!(""));
- }
}
}
}
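Summarising this file: the REPL input callback is now resolved internally via `crate::utilities::cli_input_resolver`, the full-AST subcommand gains a `check` switch that only reports whether parsing succeeded, the `lexer` subcommand is removed, and `run` is stubbed out on WASM targets. The cfg pattern used for that last point, shown in isolation (a sketch with made-up names, not the explorer's real types):

    struct Explorer;

    impl Explorer {
        // On WASM there is no interactive stdin, so the method becomes a stub.
        #[cfg(target_family = "wasm")]
        fn run(&mut self) {
            panic!("interactive input is unavailable on WASM");
        }

        // Same signature natively; the target family picks one body at compile time.
        #[cfg(not(target_family = "wasm"))]
        fn run(&mut self) {
            println!("running natively");
        }
    }

    fn main() {
        let mut explorer = Explorer;
        explorer.run();
    }

Assuming argh's usual flag naming, the new switch should be reachable as something like `ezno ast-explorer full-ast --check`.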
diff --git a/src/build.rs b/src/build.rs
index 0bc7f94f..e2b4fcb0 100644
--- a/src/build.rs
+++ b/src/build.rs
@@ -1,11 +1,8 @@
-use std::{
- mem,
- path::{Path, PathBuf},
-};
+use std::{collections::HashMap, mem, path::PathBuf};
-use checker::{DiagnosticsContainer, TypeCheckOptions};
+use checker::TypeCheckOptions;
use parser::{
- source_map::{MapFileStore, SourceMap, WithPathMap},
+ source_map::{SourceId, SourceMap, WithPathMap},
ToStringOptions,
};
@@ -17,45 +14,53 @@ pub struct Output {
pub mappings: SourceMap,
}
-#[cfg_attr(target_family = "wasm", derive(serde::Serialize, tsify::Tsify))]
pub struct BuildOutput {
- pub outputs: Vec