diff --git a/core/deno.json b/core/deno.json index abbaff0..0e6e02d 100644 --- a/core/deno.json +++ b/core/deno.json @@ -6,5 +6,5 @@ }, "name": "@dalbit-yaksok/core", "exports": "./mod.ts", - "version": "0.2.0-RC.6" + "version": "0.2.0-RC.7" } \ No newline at end of file diff --git a/core/prepare/parse/dynamicRule/functions/invoke-rule.ts b/core/prepare/parse/dynamicRule/functions/invoke-rule.ts index 508a854..2709021 100644 --- a/core/prepare/parse/dynamicRule/functions/invoke-rule.ts +++ b/core/prepare/parse/dynamicRule/functions/invoke-rule.ts @@ -6,8 +6,7 @@ import type { FunctionTemplate, FunctionTemplatePiece, } from '../../../../type/function-template.ts' -import type { PatternUnit } from '../../rule.ts' -import type { Rule } from '../../rule.ts' +import type { Rule, PatternUnit } from '../../type.ts' interface VariantedPart { index: number diff --git a/core/prepare/parse/dynamicRule/mention/create-mentioning-rules.ts b/core/prepare/parse/dynamicRule/mention/create-mentioning-rules.ts index 02367ee..306d0a5 100644 --- a/core/prepare/parse/dynamicRule/mention/create-mentioning-rules.ts +++ b/core/prepare/parse/dynamicRule/mention/create-mentioning-rules.ts @@ -1,9 +1,10 @@ +import { getTokensFromNodes } from '../../../../util/merge-tokens.ts' import { Mention, MentionScope } from '../../../../node/mention.ts' import { FunctionInvoke } from '../../../../node/function.ts' import { Identifier, Node } from '../../../../node/base.ts' -import { Rule } from '../../rule.ts' import { Token } from '../../../tokenize/token.ts' -import { getTokensFromNodes } from '../../../../util/merge-tokens.ts' + +import type { Rule } from '../../type.ts' export function createMentioningRule( fileName: string, diff --git a/core/prepare/parse/dynamicRule/mention/index.ts b/core/prepare/parse/dynamicRule/mention/index.ts index 748d525..3c7194d 100644 --- a/core/prepare/parse/dynamicRule/mention/index.ts +++ b/core/prepare/parse/dynamicRule/mention/index.ts @@ -1,11 +1,12 @@ import { 
getMentioningFiles } from './mentioning-files.ts' import { getExportedRules } from './get-exported-rules.ts' -import type { CodeFile } from '../../../../type/code-file.ts' +import { FileForRunNotExistError } from '../../../../error/prepare.ts' import { ErrorInModuleError } from '../../../../error/mention.ts' import { TOKEN_TYPE } from '../../../tokenize/token.ts' -import { Rule } from '../../rule.ts' -import { FileForRunNotExistError } from '../../../../error/prepare.ts' + +import type { CodeFile } from '../../../../type/code-file.ts' +import type { Rule } from '../../type.ts' export function getRulesFromMentioningFile(codeFile: CodeFile): Rule[] { if (!codeFile.mounted) { diff --git a/core/prepare/parse/index.ts b/core/prepare/parse/index.ts index 37a34da..eba53ef 100644 --- a/core/prepare/parse/index.ts +++ b/core/prepare/parse/index.ts @@ -7,7 +7,7 @@ import { parseIndent } from './parse-indent.ts' import { Block } from '../../node/block.ts' import type { CodeFile } from '../../type/code-file.ts' -import type { Rule } from './rule.ts' +import type { Rule } from './type.ts' import { getTokensFromNodes } from '../../util/merge-tokens.ts' import { YaksokError } from '../../error/common.ts' diff --git a/core/prepare/parse/parse-indent.ts b/core/prepare/parse/parse-indent.ts index e3bbf01..30bb850 100644 --- a/core/prepare/parse/parse-indent.ts +++ b/core/prepare/parse/parse-indent.ts @@ -46,6 +46,7 @@ export function parseIndent(_tokens: Node[], indent = 0) { const childTokens = getTokensFromNodes(child) groups.push(new Block(child, childTokens)) + groups.push(new EOL([])) } else { groups.push(token) } diff --git a/core/prepare/parse/rule.ts b/core/prepare/parse/rule.ts index 990eb5a..5fe1694 100644 --- a/core/prepare/parse/rule.ts +++ b/core/prepare/parse/rule.ts @@ -23,7 +23,6 @@ import { Loop, MinusOperator, MultiplyOperator, - type Node, Operator, PlusOperator, Print, @@ -40,22 +39,7 @@ import { import { ListLoop } from '../../node/listLoop.ts' import { 
IndexedValue } from '../../value/indexed.ts' import { NumberValue, StringValue } from '../../value/primitive.ts' - -import type { Token } from '../tokenize/token.ts' - -export interface PatternUnit { - type: { - new (...args: any[]): Node - } - value?: string - as?: string -} - -export type Rule = { - pattern: PatternUnit[] - factory: (nodes: Node[], tokens: Token[]) => Node - config?: Record<string, unknown> -} +import { Rule, RULE_FLAGS } from './type.ts' export const BASIC_RULES: Rule[][] = [ [ @@ -344,6 +328,7 @@ export const BASIC_RULES: Rule[][] = [ tokens, ) }, + flags: [RULE_FLAGS.IS_STATEMENT], }, { pattern: [ @@ -391,6 +376,7 @@ export const BASIC_RULES: Rule[][] = [ tokens, ) }, + flags: [RULE_FLAGS.IS_STATEMENT], }, ], ] @@ -468,6 +454,7 @@ export const ADVANCED_RULES: Rule[] = [ return new SetToIndex(target, value, tokens) }, + flags: [RULE_FLAGS.IS_STATEMENT], }, { pattern: [ @@ -481,9 +468,6 @@ export const ADVANCED_RULES: Rule[] = [ { type: Evaluable, }, - { - type: EOL, - }, ], factory: (nodes, tokens) => { const name = (nodes[0] as Identifier).value @@ -491,19 +475,24 @@ export const ADVANCED_RULES: Rule[] = [ return new SetVariable(name, value, tokens) }, + flags: [RULE_FLAGS.IS_STATEMENT], }, { pattern: [ { type: IfStatement, }, + { + type: EOL, + }, { type: ElseIfStatement, }, ], factory: (nodes, tokens) => { - const [ifStatement, elseIfStatement] = nodes as [ + const [ifStatement, _, elseIfStatement] = nodes as [ IfStatement, + EOL, ElseIfStatement, ] @@ -514,19 +503,24 @@ export const ADVANCED_RULES: Rule[] = [ return ifStatement }, + flags: [RULE_FLAGS.IS_STATEMENT], }, { pattern: [ { type: IfStatement, }, + { + type: EOL, + }, { type: ElseStatement, }, ], factory: (nodes, tokens) => { - const [ifStatement, elseStatement] = nodes as [ + const [ifStatement, _, elseStatement] = nodes as [ IfStatement, + EOL, ElseStatement, ] @@ -539,6 +533,7 @@ export const ADVANCED_RULES: Rule[] = [ return ifStatement }, + flags: [RULE_FLAGS.IS_STATEMENT], }, { pattern: [ @@
-570,6 +565,7 @@ export const ADVANCED_RULES: Rule[] = [ return new ElseIfStatement({ condition, body }, tokens) }, + flags: [RULE_FLAGS.IS_STATEMENT], }, { pattern: [ @@ -589,6 +585,7 @@ export const ADVANCED_RULES: Rule[] = [ return new ElseStatement(body, tokens) }, + flags: [RULE_FLAGS.IS_STATEMENT], }, { pattern: [ @@ -616,6 +613,7 @@ export const ADVANCED_RULES: Rule[] = [ return new IfStatement([{ condition, body }], tokens) }, + flags: [RULE_FLAGS.IS_STATEMENT], }, { pattern: [ @@ -631,6 +629,7 @@ export const ADVANCED_RULES: Rule[] = [ const value = nodes[0] as Evaluable return new Print(value, tokens) }, + flags: [RULE_FLAGS.IS_STATEMENT], }, { pattern: [ @@ -646,6 +645,7 @@ export const ADVANCED_RULES: Rule[] = [ }, ], factory: (nodes, tokens) => new Loop(nodes[2] as Block, tokens), + flags: [RULE_FLAGS.IS_STATEMENT], }, { pattern: [ @@ -659,6 +659,7 @@ export const ADVANCED_RULES: Rule[] = [ }, ], factory: (_nodes, tokens) => new Break(tokens), + flags: [RULE_FLAGS.IS_STATEMENT], }, { pattern: [ @@ -672,6 +673,7 @@ export const ADVANCED_RULES: Rule[] = [ }, ], factory: (_nodes, tokens) => new Return(tokens), + flags: [RULE_FLAGS.IS_STATEMENT], }, { pattern: [ @@ -707,5 +709,6 @@ export const ADVANCED_RULES: Rule[] = [ return new ListLoop(list, name, body, tokens) }, + flags: [RULE_FLAGS.IS_STATEMENT], }, ] diff --git a/core/prepare/parse/satisfiesPattern.ts b/core/prepare/parse/satisfiesPattern.ts index 5de3928..d10013e 100644 --- a/core/prepare/parse/satisfiesPattern.ts +++ b/core/prepare/parse/satisfiesPattern.ts @@ -1,5 +1,5 @@ -import type { PatternUnit } from './rule.ts' import type { Node } from '../../node/index.ts' +import type { PatternUnit } from './type.ts' export function satisfiesPattern(tokens: Node[], pattern: PatternUnit[]) { return pattern.every((pattern, index) => { diff --git a/core/prepare/parse/srParse.ts b/core/prepare/parse/srParse.ts index 260c80d..865836d 100644 --- a/core/prepare/parse/srParse.ts +++ 
b/core/prepare/parse/srParse.ts @@ -1,42 +1,53 @@ -import { type Rule, ADVANCED_RULES, BASIC_RULES } from './rule.ts' +import { ADVANCED_RULES, BASIC_RULES } from './rule.ts' import { satisfiesPattern } from './satisfiesPattern.ts' import { Block } from '../../node/block.ts' -import { TOKEN_TYPE } from '../tokenize/token.ts' -import { EOL } from '../../node/misc.ts' import type { Node } from '../../node/base.ts' import { getTokensFromNodes } from '../../util/merge-tokens.ts' +import { Rule, RULE_FLAGS } from './type.ts' +import { EOL } from '../../node/misc.ts' -export function SRParse(_tokens: Node[], rules: Rule[]) { - const tokens = [..._tokens] - const stack: Node[] = [] +export function SRParse(_nodes: Node[], rules: Rule[]) { + const leftNodes = [..._nodes] + const buffer: Node[] = [] let changed = false - tokenloop: while (true) { + nodeloop: while (true) { for (const rule of rules) { - if (stack.length < rule.pattern.length) continue + if (buffer.length < rule.pattern.length) continue - const stackSlice = stack.slice(-rule.pattern.length) + const stackSlice = buffer.slice(-rule.pattern.length) const satisfies = satisfiesPattern(stackSlice, rule.pattern) if (!satisfies) continue + + const isStatement = rule.flags?.includes(RULE_FLAGS.IS_STATEMENT) + + if (isStatement) { + const nextNode = leftNodes[0] + if (nextNode && !(nextNode instanceof EOL)) continue + + const lastNode = buffer[buffer.length - rule.pattern.length - 1] + if (lastNode && !(lastNode instanceof EOL)) continue + } + const reduced = reduce(stackSlice, rule) - stack.splice(-rule.pattern.length, rule.pattern.length, reduced) + buffer.splice(-rule.pattern.length, rule.pattern.length, reduced) changed = true - continue tokenloop + continue nodeloop } - if (tokens.length === 0) break - stack.push(tokens.shift()!) + if (leftNodes.length === 0) break + buffer.push(leftNodes.shift()!) 
} } return { changed, - tokens: stack, + nodes: buffer, } } @@ -66,22 +77,12 @@ export function callParseRecursively( } } - parsedTokens.push( - new EOL([ - { - position: { column: 0, line: 0 }, - value: '\n', - type: TOKEN_TYPE.NEW_LINE, - }, - ]), - ) - const patternsByLevel = [...BASIC_RULES, externalPatterns, ADVANCED_RULES] loop1: while (true) { for (const patterns of patternsByLevel) { const result = SRParse(parsedTokens, patterns) - parsedTokens = result.tokens + parsedTokens = result.nodes if (result.changed) continue loop1 } } diff --git a/core/prepare/parse/type.ts b/core/prepare/parse/type.ts new file mode 100644 index 0000000..32ac819 --- /dev/null +++ b/core/prepare/parse/type.ts @@ -0,0 +1,22 @@ +import type { Node } from '../../node/base.ts' +import type { Token } from '../tokenize/token.ts' + +export interface PatternUnit { + type: { + new (...args: any[]): Node + } + value?: string + as?: string +} + +export type Rule = { + pattern: PatternUnit[] + factory: (nodes: Node[], tokens: Token[]) => Node + config?: Record<string, unknown> + flags?: RULE_FLAGS[] +} + +export enum RULE_FLAGS { + IS_STATEMENT, + DEBUG, +} diff --git a/core/type/code-file.ts b/core/type/code-file.ts index 474ec9b..f250206 100644 --- a/core/type/code-file.ts +++ b/core/type/code-file.ts @@ -7,7 +7,7 @@ import { parse } from '../prepare/parse/index.ts' import { YaksokError } from '../error/common.ts' import type { Token } from '../prepare/tokenize/token.ts' -import type { Rule } from '../prepare/parse/rule.ts' +import type { Rule } from '../prepare/parse/type.ts' import type { Runtime } from '../runtime/index.ts' import type { Block } from '../node/block.ts' diff --git a/deno.json b/deno.json index a5c0acb..be7ea2d 100644 --- a/deno.json +++ b/deno.json @@ -22,7 +22,7 @@ "test", "monaco-language-provider" ], - "version": "0.2.0-RC.6", + "version": "0.2.0-RC.7", "tasks": { "apply-version": "deno run --allow-read --allow-write apply-version.ts", "publish": "deno task --recursive test && deno publish
--allow-dirty" diff --git a/monaco-language-provider/deno.json b/monaco-language-provider/deno.json index 1d0a206..7daabf2 100644 --- a/monaco-language-provider/deno.json +++ b/monaco-language-provider/deno.json @@ -4,5 +4,5 @@ }, "name": "@dalbit-yaksok/monaco-language-provider", "exports": "./mod.ts", - "version": "0.2.0-RC.6" + "version": "0.2.0-RC.7" } \ No newline at end of file diff --git a/quickjs/deno.json b/quickjs/deno.json index 31947d8..58d8e3e 100644 --- a/quickjs/deno.json +++ b/quickjs/deno.json @@ -9,5 +9,5 @@ "check-deploy": "deno publish --dry-run --allow-dirty", "test": "deno test --quiet --allow-net --allow-read --parallel & deno lint & deno task check-deploy" }, - "version": "0.2.0-RC.6" + "version": "0.2.0-RC.7" } \ No newline at end of file diff --git a/runtest.ts b/runtest.ts index 62ec98d..f9e7a47 100644 --- a/runtest.ts +++ b/runtest.ts @@ -1,5 +1,16 @@ import { yaksok } from '@dalbit-yaksok/core' -await yaksok(` -3 ~ 8.3 보여주기 -`) +await yaksok( + `이름: "재현" +나이: 20 +국적: '덴마크' + +만약 이름 = "재현" 이고 국적 = "대한민국" 이면 + "언제나 애국하는 우리 재현이" 보여주기 + + + +아니면 만약 국적 = "덴마크" 이면 + "재현이는 덴마크 사람이에요" 보여주기 +`, +) diff --git a/test/errors/invalid-variable-name.test.ts b/test/errors/invalid-variable-name.test.ts new file mode 100644 index 0000000..e394f68 --- /dev/null +++ b/test/errors/invalid-variable-name.test.ts @@ -0,0 +1,17 @@ +import { unreachable } from 'assert/unreachable' +import { yaksok } from '@dalbit-yaksok/core' +import { assertIsError } from 'assert' +import { NotDefinedIdentifierError } from '../../core/error/variable.ts' +import { assertEquals } from 'assert/equals' + +Deno.test('Variable name is not a valid identifier', async () => { + try { + await yaksok(` +1이름: "홍길동", +1이름 보여주기`) + unreachable() + } catch (e) { + assertIsError(e, NotDefinedIdentifierError) + assertEquals(e.resource?.name, '이름') + } +})