diff --git a/demo/test-cases.html b/demo/test-cases.html
index 1978e57..244e2a3 100644
--- a/demo/test-cases.html
+++ b/demo/test-cases.html
@@ -70,6 +70,11 @@

Operators

 `foo bar` ı.^\^ x.^ \(n)
+
+<<< [] >>>
+
 `lim sup`._(n -> oo)
 sum_(n=0)^k a_n = a_0 + a_1 + cdots + a_k
@@ -407,6 +412,10 @@

Forced identifiers

 it sf`AaBbCc` tt`1234` i rm i
+
+obrace(a`↑↑`b = ubrace(a^a^⋰^a)._(b "times")).^"" "up-arrow" notation ""
+
@@ -468,8 +477,15 @@

Accents

 ddot x tilde x ddot i , bar i , hat j , ul j
-3hat(xyz)
 vec x = a hat i + b hat j + c hat k
+
+3oparen(a + bx)
+
+
+oparen a+b uparen c+d oshell e+f ushell g+h obracket i+j ubracket k+l
+
diff --git a/docs/index.html b/docs/index.html
index 14edb42..d0ef629 100644
--- a/docs/index.html
+++ b/docs/index.html
@@ -546,6 +546,11 @@

Operators

 ±
+
+-+
+
 *
 ·
@@ -626,6 +631,11 @@

Operators

+
+!==
+
 o+
@@ -722,7 +732,7 @@

Operators

-prop
+oc, prop
@@ -767,15 +777,25 @@

Operators

-diamond
+<>, diamond
-square
+[], square
+
+<|
+
+|>
+
@@ -830,6 +850,16 @@

Operators

+
+<<<
+
+>>>
+
@@ -1001,6 +1031,10 @@

Operators

diff --git a/src/compiler/tokenizer/lexemes.js b/src/compiler/tokenizer/lexemes.js
index 015a7a6..17b35ae 100644
--- a/src/compiler/tokenizer/lexemes.js
+++ b/src/compiler/tokenizer/lexemes.js
@@ -143,6 +143,7 @@ export const KNOWN_IDENTS = new Map([
 export const KNOWN_OPS = new Map([
   ["-", { value: "−" }],
   ["!=", { value: "≠" }],
+  ["!==", { value: "≢" }],
   ["!in", { value: "∉" }],
   [".$", { value: "\u2061" }],
   [".*", { value: "\u2062" }],
@@ -156,6 +157,7 @@ export const KNOWN_OPS = new Map([
   ["**", { value: "∗" }],
   ["***", { value: "⋆" }],
   ["+-", { value: "±" }],
+  ["-+", { value: "∓" }],
   ["-:", { value: "÷" }],
   ["-<", { value: "≺" }],
   ["-<=", { value: "⪯" }],
@@ -167,8 +169,11 @@ export const KNOWN_OPS = new Map([
   ["/_", { value: "∠" }],
   [":.", { value: "∴" }],
   ["<-", { value: "←" }],
+  ["<<<", { value: "≪" }],
   ["<=", { value: "≤" }],
   ["<=>", { value: "⇔" }],
+  ["<>", { value: "⋄" }],
+  ["<|", { value: "⊲" }],
   ["==", { value: "≡" }],
   ["=>", { value: "⇒" }],
   [">-", { value: "≻" }],
@@ -177,10 +182,12 @@ export const KNOWN_OPS = new Map([
   [">->>", { value: "⤖" }],
   ["><|", { value: "⋊" }],
   [">=", { value: "≥" }],
+  [">>>", { value: "≫" }],
   ["@", { value: "∘" }],
   ["AA", { value: "∀" }],
   ["EE", { value: "∃" }],
   ["TT", { value: "⊤" }],
+  ["[]", { value: "□" }],
   ["^^", { value: "∧" }],
   ["^^^", { value: "⋀" }],
   ["_|_", { value: "⊥" }],
@@ -208,6 +215,7 @@ export const KNOWN_OPS = new Map([
   ["not", { value: "¬" }],
   ["o+", { value: "⊕" }],
   ["o.", { value: "⊙" }],
+  ["oc", { value: "∝" }],
   ["oint", { value: "∮" }],
   ["or", { value: "or" }],
   ["otherwise", { value: "otherwise" }],
@@ -233,6 +241,7 @@ export const KNOWN_OPS = new Map([
   ["|--", { value: "⊢" }],
   ["|->", { value: "↦" }],
   ["|==", { value: "⊨" }],
+  ["|>", { value: "⊳" }],
   ["|><", { value: "⋉" }],
   ["|><|", { value: "⋈" }],
   ["~=", { value: "≅" }],
@@ -262,13 +271,20 @@ export const KNOWN_PARENS_CLOSE = new Map([
 export const KNOWN_PREFIX = new Map([
   // Accents
   ["bar", { name: "over", accent: "‾" }],
-  ["obrace", { name: "over", accent: "⏞" }],
   ["ddot", { name: "over", accent: "⋅⋅" }],
   ["dot", { name: "over", accent: "⋅" }],
   ["hat", { name: "over", accent: "^" }],
+  ["obrace", { name: "over", accent: "⏞" }],
+  ["obracket", { name: "over", accent: "⎴" }],
+  ["oparen", { name: "over", accent: "⏜" }],
+  ["oshell", { name: "over", accent: "⏠" }],
   ["tilde", { name: "over", accent: "˜" }],
   ["ubrace", { name: "under", accent: "⏟" }],
+  ["ubrace", { name: "under", accent: "⏟" }],
+  ["ubracket", { name: "under", accent: "⎵" }],
   ["ul", { name: "under", accent: "_" }],
+  ["uparen", { name: "under", accent: "⏝" }],
+  ["ushell", { name: "under", accent: "⏡" }],
   ["vec", { name: "over", accent: "→" }],
 
   // Groups
diff --git a/src/compiler/tokenizer/scanners/paren-close.js b/src/compiler/tokenizer/scanners/paren-close.js
index 1f40d3c..dd119e6 100644
--- a/src/compiler/tokenizer/scanners/paren-close.js
+++ b/src/compiler/tokenizer/scanners/paren-close.js
@@ -1,4 +1,4 @@
-import { KNOWN_PARENS_CLOSE, isPunctClose } from "../lexemes.js";
+import { KNOWN_OPS, KNOWN_PARENS_CLOSE, isPunctClose } from "../lexemes.js";
 /**
  * @param {string} partial
  */
@@ -33,6 +33,15 @@ export default function parenCloseScanner(char, input, { grouping, start }) {
     }
   }
 
+  {
+    const [nextChar] = input.slice(start + value.length);
+    const nextValue = value + nextChar;
+
+    if (KNOWN_OPS.has(nextValue)) {
+      return null;
+    }
+  }
+
   const known = KNOWN_PARENS_CLOSE.get(value);
 
   if (known) {
diff --git a/src/compiler/tokenizer/scanners/paren-open.js b/src/compiler/tokenizer/scanners/paren-open.js
index 13d0a8c..cca4679 100644
--- a/src/compiler/tokenizer/scanners/paren-open.js
+++ b/src/compiler/tokenizer/scanners/paren-open.js
@@ -1,4 +1,4 @@
-import { KNOWN_PARENS_OPEN, isPunctOpen } from "../lexemes.js";
+import { KNOWN_OPS, KNOWN_PARENS_OPEN, isPunctOpen } from "../lexemes.js";
 /**
  * @param {string} partial
  */
@@ -29,6 +29,15 @@ export default function parenOpenScanner(char, input, { start }) {
     }
   }
 
+  {
+    const [nextChar] = input.slice(start + value.length);
+    const nextValue = value + nextChar;
+
+    if (KNOWN_OPS.has(nextValue)) {
+      return null;
+    }
+  }
+
   const known = KNOWN_PARENS_OPEN.get(value);
 
   if (known) {
diff --git a/src/compiler/tokenizer/scanners/paren-open.test.js b/src/compiler/tokenizer/scanners/paren-open.test.js
index 8e712d3..1b3a36c 100644
--- a/src/compiler/tokenizer/scanners/paren-open.test.js
+++ b/src/compiler/tokenizer/scanners/paren-open.test.js
@@ -35,3 +35,11 @@ test("combined paren-open", (t) => {
     end: 2,
   });
 });
+
+test("paren open is also an operator", (t) => {
+  t.is(parenOpen("<", "<<<", { start: 0 }), null);
+});
+
+test("paren close is also an operator", (t) => {
+  t.is(parenOpen(">", ">>>", { start: 0 }), null);
+});
diff --git a/test/accents.js b/test/accents.js
index bb8c3e7..3c7a6e1 100644
--- a/test/accents.js
+++ b/test/accents.js
@@ -29,8 +29,16 @@ test("Dotless variants", (t) => {
   t.snapshot(render("ul j"));
 });
 
-test("Should put accents over all the following parenthesis", (t) => {
-  t.snapshot(render("3hat(xyz)"));
+test("Should put accents over all in the following parenthesis", (t) => {
+  t.snapshot(render("3oparen(a + bx)"));
+});
+
+test("Ties together", (t) => {
+  t.snapshot(
+    render(
+      "oparen a+b uparen c+d oshell e+f ushell g+h obracket i+j ubracket k+l",
+    ),
+  );
 });
 
 test("Physics vector notation", (t) => {
diff --git a/test/operators.js b/test/operators.js
index 2d6c399..93b6787 100644
--- a/test/operators.js
+++ b/test/operators.js
@@ -25,6 +25,10 @@ test("Only force an operator when \\ precedes a character", (t) => {
   t.snapshot(render("\\"));
 });
 
+test("Operators that could be open and close parens", (t) => {
+  t.snapshot(render("<<< [] >>>"));
+});
+
 test("i hat", (t) => {
   t.snapshot(render("ı.^\\^"));
 });
diff --git a/test/snapshots/accents.js.md b/test/snapshots/accents.js.md
index d3645e1..f5bb629 100644
--- a/test/snapshots/accents.js.md
+++ b/test/snapshots/accents.js.md
@@ -60,11 +60,17 @@ Generated by [AVA](https://avajs.dev).
 
     'j_'
 
-## Should put accents over all the following parenthesis
+## Should put accents over all in the following parenthesis
 
 > Snapshot 1
 
-    '3xyz^'
+    '3a+bx'
+
+## Ties together
+
+> Snapshot 1
+
+    'a+bc+de+fg+hi+jk+l'
 
 ## Physics vector notation
 
diff --git a/test/snapshots/accents.js.snap b/test/snapshots/accents.js.snap
index f9bb841..b4c1ab0 100644
Binary files a/test/snapshots/accents.js.snap and b/test/snapshots/accents.js.snap differ
diff --git a/test/snapshots/operators.js.md b/test/snapshots/operators.js.md
index a014977..08cd184 100644
--- a/test/snapshots/operators.js.md
+++ b/test/snapshots/operators.js.md
@@ -24,6 +24,12 @@ Generated by [AVA](https://avajs.dev).
 
     '\\'
 
+## Operators that could be open and close parens
+
+> Snapshot 1
+
+    ''
+
 ## i hat
 
 > Snapshot 1
diff --git a/test/snapshots/operators.js.snap b/test/snapshots/operators.js.snap
index ae9ab0b..33c6fa2 100644
Binary files a/test/snapshots/operators.js.snap and b/test/snapshots/operators.js.snap differ