init codeGenerator in latl

This commit is contained in:
Sorrel Bri 2020-05-06 22:31:15 -07:00
parent 432630e600
commit dee27b0d30
6 changed files with 59 additions and 49 deletions

View file

@@ -0,0 +1,19 @@
import { parser } from './parser';

export const codeGenerator = (latl) => {
  const results = parser().feed(latl).results;

  // Fold the parser output into a single string of generated code.
  const nodeReader = (code, node) => {
    // Arrays: reduce every child node into the accumulator.
    if (node.length) {
      return node.reduce(nodeReader, code)
    }
    if (!node) return code;
    // Wrapped results ({ main: [...] }): descend into the payload.
    if (node.main) {
      return nodeReader(code, node.main)
    }
    return code + node;
  }

  return nodeReader('', results)
}
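
For orientation, a minimal usage sketch of the generator against the simpleComment fixture used in the tests below. The import path and the intermediate AST shape are assumptions, inferred from the updated parser test (expect(AST[0]).toStrictEqual({ main: [] })) and the code: '' value added to assertionData.

import { codeGenerator } from './codeGenerator'; // path assumed

// parser().feed('; comment').results is [{ main: [] }] per the parser test,
// so nodeReader folds over the single result, descends into its empty `main`
// array, and ends up appending nothing to the accumulator.
const code = codeGenerator('; comment');
console.log(code); // '' — matching the `code: ''` fixture added to assertionData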

View file

@@ -6,8 +6,8 @@ function id(x) { return x[0]; }
const { lexer } = require('./lexer.js');
const getTerminal = d => d ? d[0] : null;
const getAll = d => d.map((item, i) => ({ [i]: item }));
const flag = token => d => d.map(item => ({ [token]: item }))
const clearNull = d => d.filter(t => !!t);
const flag = token => d => d.map(item => ({ [token]: clearNull(item) }))
const clearNull = d => d.filter(t => !!t && (t.length !== 1 || t[0])).map(t => t.length ? clearNull(t) : t);
const flagIndex = d => d.map((item, i) => ({[i]: item}))
const remove = _ => null;
const append = d => d.join('');
@@ -21,16 +21,21 @@ var grammar = {
Lexer: lexer,
ParserRules: [
{"name": "main$ebnf$1", "symbols": []},
{"name": "main$ebnf$1$subexpression$1", "symbols": ["_", "statement", "_"]},
{"name": "main$ebnf$1$subexpression$1", "symbols": ["_", "statement"]},
{"name": "main$ebnf$1", "symbols": ["main$ebnf$1", "main$ebnf$1$subexpression$1"], "postprocess": function arrpush(d) {return d[0].concat([d[1]]);}},
{"name": "main", "symbols": ["main$ebnf$1"], "postprocess": pipe(clearNull, flag('main'), getTerminal)},
{"name": "main", "symbols": ["main$ebnf$1", "_"], "postprocess": pipe(
getTerminal,
clearNull,
flag('main'),
getTerminal,
) },
{"name": "_$ebnf$1$subexpression$1", "symbols": [(lexer.has("whiteSpace") ? {type: "whiteSpace"} : whiteSpace)]},
{"name": "_$ebnf$1", "symbols": ["_$ebnf$1$subexpression$1"], "postprocess": id},
{"name": "_$ebnf$1", "symbols": [], "postprocess": function(d) {return null;}},
{"name": "_", "symbols": ["_$ebnf$1"], "postprocess": remove},
{"name": "__", "symbols": [(lexer.has("whiteSpace") ? {type: "whiteSpace"} : whiteSpace)], "postprocess": remove},
{"name": "statement", "symbols": ["comment"]},
{"name": "statement", "symbols": ["definition"], "postprocess": getTerminal, clearNull},
{"name": "statement", "symbols": ["definition"], "postprocess": pipe(getTerminal, clearNull)},
{"name": "comment", "symbols": [(lexer.has("comment") ? {type: "comment"} : comment)], "postprocess": pipe(getTerminal, remove)},
{"name": "definition", "symbols": [(lexer.has("kwSet") ? {type: "kwSet"} : kwSet), "__", "setDefinition"], "postprocess": d => ({token: d[0].type, [d[0].value]: d[2]})},
{"name": "setDefinition$ebnf$1", "symbols": []},

View file

@@ -2,8 +2,8 @@
const { lexer } = require('./lexer.js');
const getTerminal = d => d ? d[0] : null;
const getAll = d => d.map((item, i) => ({ [i]: item }));
const flag = token => d => d.map(item => ({ [token]: item }))
const clearNull = d => d.filter(t => !!t);
const flag = token => d => d.map(item => ({ [token]: clearNull(item) }))
const clearNull = d => d.filter(t => !!t && (t.length !== 1 || t[0])).map(t => t.length ? clearNull(t) : t);
const flagIndex = d => d.map((item, i) => ({[i]: item}))
const remove = _ => null;
const append = d => d.join('');
@@ -17,8 +17,13 @@
@lexer lexer
main -> (_ statement _):*
{% pipe(clearNull, flag('main'), getTerminal) %}
main -> (_ statement):* _
{% pipe(
getTerminal,
clearNull,
flag('main'),
getTerminal,
) %}
_ -> (%whiteSpace):?
{% remove %}
@@ -27,7 +32,7 @@ __ -> %whiteSpace
{% remove %}
statement -> comment | definition
{% getTerminal, clearNull %}
{% pipe(getTerminal, clearNull) %}
comment -> %comment
{% pipe(getTerminal, remove) %}
@@ -48,35 +53,3 @@ phoneList -> (%phone %comma _):* %phone
if (!t.length) return t;
return t[0].filter(st => st && st.type === 'phone')
}) %}
# assignmentExpression:
# /*
# * SPEC:
# * conditionalExpression
# * | leftHandSideExpression assignmentOperator assignmentExpression
# */
# (leftHandSideExpression assignmentOperator) =>
# leftHandSideExpression assignmentOperator assignmentExpression
# | conditionalExpression
# ;
# assignmentExpressionNoln:
# conditionalExpressionNoln
# | leftHandSideExpression assignmentOperator assignmentExpressionNoln
# ;
# assignmentOperator:
# /* note that in the grammar these are listed out explicitly */
# EQ | TIMESEQ | DIVIDEEQ | PERCENTEQ | PLUSEQ | MINUSEQ | LSHIFTEQ | RSHIFTEQ
# | GT3EQ | AMPEREQ | CAROTEQ | PIPEEQ
# ;
# expression:
# /*
# * SPEC:
# * assignmentExpression
# * | expression COMMA assignmentExpression
# */
# assignmentExpression (expressionTail)*
# ;
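
The main rule change (main -> (_ statement):* _ replacing (_ statement _):*) likely exists to keep parses unambiguous: with a trailing _ inside the repetition, whitespace between two statements could attach to either the preceding or the following repetition, giving nearley more than one result. A quick way to check for that, sketched against the existing test fixtures (import paths are assumed, mirroring the test files in this commit):

import { parser } from '../parser';              // path assumed
import { assertionData } from './assertionData';

const { results } = parser().feed(assertionData.commaSetDefinition.latl);
// One entry means a single unambiguous parse; several entries would mean the
// whitespace rules still let the same input be bracketed in more than one way.
console.log(results.length);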

View file

@@ -3,7 +3,8 @@ export const assertionData = {
latl: `; comment`,
tokens: [
{ type: 'comment', value: '; comment'}
]
],
code: ''
},
simpleSetDefinition: {
latl: `set PLOSIVES`,
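
For readability, the simpleComment entry as it reads after this change, reassembled from the hunk above:

simpleComment: {
  latl: `; comment`,
  tokens: [
    { type: 'comment', value: '; comment' }
  ],
  code: ''
},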

View file

@@ -0,0 +1,12 @@
import { assertionData } from './assertionData';
import { codeGenerator } from '../codeGenerator';

describe('codeGenerator', () => {
  it('parses simple comment', () => {
    const { latl } = assertionData.simpleComment;
    const code = codeGenerator(latl)
    // expect(AST.length).toBe(1);
    // expect(AST[0]).toStrictEqual({ main: [ ]})
    console.log(code)
  })
})
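
The new test only logs the output for now; below is a sketch of the assertion it appears to be building toward, using the code field added to assertionData in this commit. The test name and intent are assumptions.

// Drop-in for the describe block above; relies on the same imports.
it('generates no code for a bare comment', () => {
  const { latl, code } = assertionData.simpleComment;
  expect(codeGenerator(latl)).toBe(code); // code is '' — comments produce no output
});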

View file

@@ -7,14 +7,14 @@ describe('parser', () => {
const { latl } = assertionData.simpleComment;
const AST = parser().feed(latl).results;
expect(AST.length).toBe(1);
expect(AST[0]).toStrictEqual({ main: [ null ]})
expect(AST[0]).toStrictEqual({ main: [ ]})
})
it('parses multiple set definitions with comma operator', () => {
const { latl } = assertionData.commaSetDefinition;
const AST = parser().feed(latl).results;
console.log(AST[0])
});
// it('parses multiple set definitions with comma operator', () => {
// const { latl } = assertionData.commaSetDefinition;
// const AST = parser().feed(latl).results;
// console.log(AST[0])
// });
// it('lexes set definition with alias', () => {
// const { latl, tokens } = assertionData.setAliasDefinition;