diff --git a/src/utils/latl/grammar.js b/src/utils/latl/grammar.js index 9e51a0c..8a6d063 100644 --- a/src/utils/latl/grammar.js +++ b/src/utils/latl/grammar.js @@ -5,47 +5,46 @@ function id(x) { return x[0]; } const { lexer } = require('./lexer.js'); const getTerminal = d => d ? d[0] : null; - const getAll = d => d.map((item, i) => ({[i]: item})); - const flag = token => d => d.map(item => ({[token]: item})) + const getAll = d => d.map((item, i) => ({ [i]: item })); + const flag = token => d => d.map(item => ({ [token]: item })) const clearNull = d => d.filter(t => !!t); const flagIndex = d => d.map((item, i) => ({[i]: item})) const remove = _ => null; const append = d => d.join(''); const constructSet = d => d.reduce((acc, t) => { - if (t && t.type === 'setIdentifier') acc.push({set: t}) - if (t && t.length) acc[acc.length - 1].phones = t; + if (t && t.type === 'setIdentifier') acc.push({set: t}); + if (t && t.length) acc[acc.length - 1].phones = t; return acc; }, []); - const compose = (...funcs) => d => funcs.reduce((acc, func) => func(acc), d) + const pipe = (...funcs) => d => funcs.reduce((acc, func) => func(acc), d); var grammar = { Lexer: lexer, ParserRules: [ {"name": "main$ebnf$1", "symbols": []}, - {"name": "main$ebnf$1$subexpression$1", "symbols": ["statement"]}, + {"name": "main$ebnf$1$subexpression$1", "symbols": ["_", "statement", "_"]}, {"name": "main$ebnf$1", "symbols": ["main$ebnf$1", "main$ebnf$1$subexpression$1"], "postprocess": function arrpush(d) {return d[0].concat([d[1]]);}}, - {"name": "main", "symbols": ["main$ebnf$1"], "postprocess": compose(flag('main'), getTerminal)}, + {"name": "main", "symbols": ["main$ebnf$1"], "postprocess": pipe(clearNull, flag('main'), getTerminal)}, {"name": "_$ebnf$1$subexpression$1", "symbols": [(lexer.has("whiteSpace") ? 
{type: "whiteSpace"} : whiteSpace)]}, {"name": "_$ebnf$1", "symbols": ["_$ebnf$1$subexpression$1"], "postprocess": id}, {"name": "_$ebnf$1", "symbols": [], "postprocess": function(d) {return null;}}, {"name": "_", "symbols": ["_$ebnf$1"], "postprocess": remove}, {"name": "__", "symbols": [(lexer.has("whiteSpace") ? {type: "whiteSpace"} : whiteSpace)], "postprocess": remove}, {"name": "statement", "symbols": ["comment"]}, - {"name": "statement", "symbols": ["definition"], "postprocess": compose(clearNull, getTerminal)}, - {"name": "comment", "symbols": [(lexer.has("comment") ? {type: "comment"} : comment)], "postprocess": compose(remove, getTerminal)}, - {"name": "definition", "symbols": [(lexer.has("kwSet") ? {type: "kwSet"} : kwSet), "__", "setDefinition"], "postprocess": d => ({token: 'setDefinition', sets: d[2]})}, + {"name": "statement", "symbols": ["definition"], "postprocess": pipe(clearNull, getTerminal)}, + {"name": "comment", "symbols": [(lexer.has("comment") ? {type: "comment"} : comment)], "postprocess": pipe(getTerminal, remove)}, + {"name": "definition", "symbols": [(lexer.has("kwSet") ? {type: "kwSet"} : kwSet), "__", "setDefinition"], "postprocess": d => ({token: d[0].type, [d[0].value]: d[2]})}, {"name": "setDefinition$ebnf$1", "symbols": []}, {"name": "setDefinition$ebnf$1$subexpression$1", "symbols": [(lexer.has("setIdentifier") ? {type: "setIdentifier"} : setIdentifier), "__", (lexer.has("equal") ? {type: "equal"} : equal), "__", "setExpression", (lexer.has("comma") ? {type: "comma"} : comma), "__"]}, {"name": "setDefinition$ebnf$1", "symbols": ["setDefinition$ebnf$1", "setDefinition$ebnf$1$subexpression$1"], "postprocess": function arrpush(d) {return d[0].concat([d[1]]);}}, - {"name": "setDefinition", "symbols": ["setDefinition$ebnf$1", (lexer.has("setIdentifier") ? {type: "setIdentifier"} : setIdentifier), "__", (lexer.has("equal") ? 
{type: "equal"} : equal), "__", "setExpression"], "postprocess": constructSet}, + {"name": "setDefinition", "symbols": ["setDefinition$ebnf$1", (lexer.has("setIdentifier") ? {type: "setIdentifier"} : setIdentifier), "__", (lexer.has("equal") ? {type: "equal"} : equal), "__", "setExpression"]}, {"name": "setExpression", "symbols": [(lexer.has("openSquareBracket") ? {type: "openSquareBracket"} : openSquareBracket), "_", "phoneList", "_", (lexer.has("closeSquareBracket") ? {type: "closeSquareBracket"} : closeSquareBracket)], "postprocess": d => d.filter(t => t && t.length)}, {"name": "phoneList$ebnf$1", "symbols": []}, {"name": "phoneList$ebnf$1$subexpression$1", "symbols": [(lexer.has("phone") ? {type: "phone"} : phone), (lexer.has("comma") ? {type: "comma"} : comma), "_"]}, {"name": "phoneList$ebnf$1", "symbols": ["phoneList$ebnf$1", "phoneList$ebnf$1$subexpression$1"], "postprocess": function arrpush(d) {return d[0].concat([d[1]]);}}, - {"name": "phoneList", "symbols": ["phoneList$ebnf$1", (lexer.has("phone") ? {type: "phone"} : phone)], "postprocess": d => d.filter(t => t && (t.type === 'phone' || t.length) ) - .map(t => { + {"name": "phoneList", "symbols": ["phoneList$ebnf$1", (lexer.has("phone") ? {type: "phone"} : phone)], "postprocess": d => d.filter(t => t && (t.type === 'phone' || t[0]) ) + .flatMap(t => { if (!t.length) return t; - t.filter(st => st && st.type === 'phone') - return t; + return t[0].filter(st => st && st.type === 'phone') }) } ] , ParserStart: "main" diff --git a/src/utils/latl/grammar.ne b/src/utils/latl/grammar.ne index fff5adb..4807918 100644 --- a/src/utils/latl/grammar.ne +++ b/src/utils/latl/grammar.ne @@ -1,24 +1,24 @@ @{% const { lexer } = require('./lexer.js'); const getTerminal = d => d ? 
d[0] : null; - const getAll = d => d.map((item, i) => ({[i]: item})); - const flag = token => d => d.map(item => ({[token]: item})) + const getAll = d => d.map((item, i) => ({ [i]: item })); + const flag = token => d => d.map(item => ({ [token]: item })) const clearNull = d => d.filter(t => !!t); const flagIndex = d => d.map((item, i) => ({[i]: item})) const remove = _ => null; const append = d => d.join(''); const constructSet = d => d.reduce((acc, t) => { - if (t && t.type === 'setIdentifier') acc.push({set: t}) - if (t && t.length) acc[acc.length - 1].phones = t; + if (t && t.type === 'setIdentifier') acc.push({set: t}); + if (t && t.length) acc[acc.length - 1].phones = t; return acc; }, []); - const compose = (...funcs) => d => funcs.reduce((acc, func) => func(acc), d) + const pipe = (...funcs) => d => funcs.reduce((acc, func) => func(acc), d); %} @lexer lexer -main -> (statement):* - {% compose(flag('main'), getTerminal) %} +main -> (_ statement _):* + {% pipe(clearNull, flag('main'), getTerminal) %} _ -> (%whiteSpace):? {% remove %} @@ -26,24 +26,27 @@ _ -> (%whiteSpace):? 
__ -> %whiteSpace {% remove %} -statement -> comment | definition - {% compose(clearNull, getTerminal) %} +statement -> comment | definition + {% pipe(clearNull, getTerminal) %} comment -> %comment - {% compose(remove, getTerminal) %} + {% pipe(getTerminal, remove) %} # SETS -definition -> %kwSet __ setDefinition {% d => ({token: 'setDefinition', sets: d[2]}) %} +definition -> %kwSet __ setDefinition + {% d => ({token: d[0].type, [d[0].value]: d[2]}) %} setDefinition -> (%setIdentifier __ %equal __ setExpression %comma __):* %setIdentifier __ %equal __ setExpression - {% constructSet %} + # {% pipe( + # //constructSet, + # getTerminal) %} setExpression -> %openSquareBracket _ phoneList _ %closeSquareBracket {% d => d.filter(t => t && t.length) %} phoneList -> (%phone %comma _):* %phone - {% d => d.filter(t => t && (t.type === 'phone' || t.length) ) - .map(t => { + # {% clearNull %} + {% d => d.filter(t => t && (t.type === 'phone' || t[0]) ) + .flatMap(t => { if (!t.length) return t; - t.filter(st => st && st.type === 'phone') - return t; + return t[0].filter(st => st && st.type === 'phone') }) %} diff --git a/src/utils/latl/test/parser.test.js b/src/utils/latl/test/parser.test.js index 2b36d59..60a5e39 100644 --- a/src/utils/latl/test/parser.test.js +++ b/src/utils/latl/test/parser.test.js @@ -7,15 +7,14 @@ describe('parser', () => { const { latl } = assertionData.simpleComment; const AST = parser().feed(latl).results; expect(AST.length).toBe(1); - console.log(AST[0]) - // expect(AST[0]).toStrictEqual() + expect(AST[0]).toStrictEqual({ main: [ null ]}) }) - // it('parses multiple set definitions with comma operator', () => { - // const { latl } = assertionData.commaSetDefinition; - // const AST = parser().feed(latl) - // console.log(AST) - // }); + it('parses multiple set definitions with comma operator', () => { + const { latl } = assertionData.commaSetDefinition; + const AST = parser().feed(latl).results; + console.log(AST[0]) + }); // it('lexes set definition with 
alias', () => { // const { latl, tokens } = assertionData.setAliasDefinition;