add support for set aliases
This commit is contained in:
parent
9619b4a07c
commit
bb8c05a579
5 changed files with 52 additions and 9 deletions
|
@ -38,6 +38,13 @@ Sets are defined with the set keyword followed by an equal sign and a set expres
|
||||||
```
|
```
|
||||||
set SHORT_VOWELS = [ a, i, u ]
|
set SHORT_VOWELS = [ a, i, u ]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
A single alias can be provided to the set during definition:
|
||||||
|
```
|
||||||
|
; the alias N can be used to refer to this set
|
||||||
|
set NASAL_PULMONIC_CONSONANTS, N = [ m, ɱ, n̼, n, ɳ, ɲ, ŋ, ɴ ]
|
||||||
|
```
|
||||||
|
|
||||||
Lists of sets can be defined using a comma followed by whitespace syntax
|
Lists of sets can be defined using a comma followed by whitespace syntax
|
||||||
```
|
```
|
||||||
set PLOSIVES = [ p, t, k ],
|
set PLOSIVES = [ p, t, k ],
|
||||||
|
|
|
@ -58,13 +58,27 @@ var grammar = {
|
||||||
d => d.map(u => u && u.length ? u.map(v => v.length ? v.filter(t => t && t.type !== 'comma' && t.type !== 'kwSet')[0] : v) : u),
|
d => d.map(u => u && u.length ? u.map(v => v.length ? v.filter(t => t && t.type !== 'comma' && t.type !== 'kwSet')[0] : v) : u),
|
||||||
clearNull,
|
clearNull,
|
||||||
) },
|
) },
|
||||||
{"name": "setDefinition", "symbols": [(lexer.has("setIdentifier") ? {type: "setIdentifier"} : setIdentifier), "__", "equal", "__", "setExpression"], "postprocess":
|
{"name": "setDefinition$ebnf$1$subexpression$1", "symbols": ["setAlias"]},
|
||||||
|
{"name": "setDefinition$ebnf$1", "symbols": ["setDefinition$ebnf$1$subexpression$1"], "postprocess": id},
|
||||||
|
{"name": "setDefinition$ebnf$1", "symbols": [], "postprocess": function(d) {return null;}},
|
||||||
|
{"name": "setDefinition", "symbols": [(lexer.has("setIdentifier") ? {type: "setIdentifier"} : setIdentifier), "setDefinition$ebnf$1", "__", "equal", "__", "setExpression"], "postprocess":
|
||||||
pipe(
|
pipe(
|
||||||
d => d.filter(t => !!t && t.length !== 0),
|
d => d.filter(t => !!t && t.length !== 0),
|
||||||
|
d => d.map(u => u && u.length ? u.map(t => t && t.length ? t.filter(v => v && v.type !== 'comma') : t) : u),
|
||||||
d => d.map(t => t.type === 'setIdentifier' ? { setIdentifier: t.toString() } : t),
|
d => d.map(t => t.type === 'setIdentifier' ? { setIdentifier: t.toString() } : t),
|
||||||
d => d.map(t => t && t.length && t[0].hasOwnProperty('setExpression') ? t[0] : t),
|
d => d.map(t => t && t.length && t[0].hasOwnProperty('setExpression') ? t[0] : t),
|
||||||
|
d => d.map(t => t.length ?
|
||||||
|
// pretty ugly ([ { type: 'alias', alias: [ string ] }] ) => { setAlias: str }
|
||||||
|
{ setAlias: t.reduce((aliases, token) => token.type === 'alias' ? [...aliases, ...token.alias] : aliases, [])[0] }
|
||||||
|
: t),
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
|
{"name": "setAlias", "symbols": [(lexer.has("comma") ? {type: "comma"} : comma), "_", (lexer.has("setIdentifier") ? {type: "setIdentifier"} : setIdentifier)], "postprocess": pipe(
|
||||||
|
d => d && d.length ? d.filter(t => !!t) : d,
|
||||||
|
d => d.map(t => t.type === 'setIdentifier' ? t.toString() : null),
|
||||||
|
d => d.filter(t => !!t),
|
||||||
|
d => ({type: 'alias', alias: d }),
|
||||||
|
) },
|
||||||
{"name": "setExpression", "symbols": [(lexer.has("openSquareBracket") ? {type: "openSquareBracket"} : openSquareBracket), "_", "phoneList", "_", (lexer.has("closeSquareBracket") ? {type: "closeSquareBracket"} : closeSquareBracket)], "postprocess":
|
{"name": "setExpression", "symbols": [(lexer.has("openSquareBracket") ? {type: "openSquareBracket"} : openSquareBracket), "_", "phoneList", "_", (lexer.has("closeSquareBracket") ? {type: "closeSquareBracket"} : closeSquareBracket)], "postprocess":
|
||||||
pipe(
|
pipe(
|
||||||
// filters commas and whitespace
|
// filters commas and whitespace
|
||||||
|
|
|
@ -59,14 +59,26 @@ definition -> %kwSet __ (setDefinition %comma __):* setDefinition
|
||||||
d => d.map(u => u && u.length ? u.map(v => v.length ? v.filter(t => t && t.type !== 'comma' && t.type !== 'kwSet')[0] : v) : u),
|
d => d.map(u => u && u.length ? u.map(v => v.length ? v.filter(t => t && t.type !== 'comma' && t.type !== 'kwSet')[0] : v) : u),
|
||||||
clearNull,
|
clearNull,
|
||||||
) %}
|
) %}
|
||||||
setDefinition -> %setIdentifier __ equal __ setExpression
|
setDefinition -> %setIdentifier (setAlias):? __ equal __ setExpression
|
||||||
{%
|
{%
|
||||||
pipe(
|
pipe(
|
||||||
d => d.filter(t => !!t && t.length !== 0),
|
d => d.filter(t => !!t && t.length !== 0),
|
||||||
|
d => d.map(u => u && u.length ? u.map(t => t && t.length ? t.filter(v => v && v.type !== 'comma') : t) : u),
|
||||||
d => d.map(t => t.type === 'setIdentifier' ? { setIdentifier: t.toString() } : t),
|
d => d.map(t => t.type === 'setIdentifier' ? { setIdentifier: t.toString() } : t),
|
||||||
d => d.map(t => t && t.length && t[0].hasOwnProperty('setExpression') ? t[0] : t),
|
d => d.map(t => t && t.length && t[0].hasOwnProperty('setExpression') ? t[0] : t),
|
||||||
|
d => d.map(t => t.length ?
|
||||||
|
// pretty ugly ([ { type: 'alias', alias: [ string ] }] ) => { setAlias: str }
|
||||||
|
{ setAlias: t.reduce((aliases, token) => token.type === 'alias' ? [...aliases, ...token.alias] : aliases, [])[0] }
|
||||||
|
: t),
|
||||||
)
|
)
|
||||||
%}
|
%}
|
||||||
|
setAlias -> %comma _ %setIdentifier
|
||||||
|
{% pipe(
|
||||||
|
d => d && d.length ? d.filter(t => !!t) : d,
|
||||||
|
d => d.map(t => t.type === 'setIdentifier' ? t.toString() : null),
|
||||||
|
d => d.filter(t => !!t),
|
||||||
|
d => ({type: 'alias', alias: d }),
|
||||||
|
) %}
|
||||||
setExpression -> %openSquareBracket _ phoneList _ %closeSquareBracket
|
setExpression -> %openSquareBracket _ phoneList _ %closeSquareBracket
|
||||||
{%
|
{%
|
||||||
pipe(
|
pipe(
|
||||||
|
|
|
@ -228,7 +228,17 @@ set NASAL_PULMONIC_CONSONANTS, N = [ m̥, m, ɱ, n̼, n̥, n, ɳ̊,
|
||||||
{ type: 'phone', value: 'ɴ' },
|
{ type: 'phone', value: 'ɴ' },
|
||||||
{ type: 'whiteSpace', value: ' ' },
|
{ type: 'whiteSpace', value: ' ' },
|
||||||
{ type: 'closeSquareBracket', value: ']' },
|
{ type: 'closeSquareBracket', value: ']' },
|
||||||
|
],
|
||||||
|
AST: {
|
||||||
|
main: [
|
||||||
|
{
|
||||||
|
type: 'set',
|
||||||
|
setIdentifier: 'NASAL_PULMONIC_CONSONANTS',
|
||||||
|
setAlias: 'N',
|
||||||
|
setExpression: [ 'm̥', 'm', 'ɱ', 'n̼', 'n̥', 'n', 'ɳ̊', 'ɳ', 'ɲ̊', 'ɲ', 'ŋ', '̊ŋ', 'ɴ' ]
|
||||||
|
}
|
||||||
]
|
]
|
||||||
|
}
|
||||||
},
|
},
|
||||||
setDefinitionJoin: {
|
setDefinitionJoin: {
|
||||||
latl: `
|
latl: `
|
||||||
|
|
|
@ -24,12 +24,12 @@ describe('parser', () => {
|
||||||
expect(feedResults[0]).toStrictEqual(AST);
|
expect(feedResults[0]).toStrictEqual(AST);
|
||||||
});
|
});
|
||||||
|
|
||||||
it.todo('lexes set definition with alias'
|
it('lexes set definition with alias'
|
||||||
// , () => {
|
, () => {
|
||||||
// const { latl, tokens } = assertionData.setAliasDefinition;
|
const { latl, AST } = assertionData.setAliasDefinition;
|
||||||
// const stream = getStream(latl);
|
const feedResults = parser().feed(latl).results;
|
||||||
// expect(stream).toStrictEqual(tokens);
|
expect(feedResults[0]).toStrictEqual(AST);
|
||||||
// }
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
it.todo('lexes set definition with set join'
|
it.todo('lexes set definition with set join'
|
||||||
|
|
Loading…
Reference in a new issue