add reducer actions ADD_LEXEME, SET_LEXICON
parent fe474583ef
commit a6b362efda
4 changed files with 98 additions and 1 deletion
17  src/reducers/stateReducer.features.test.js  Normal file
@@ -0,0 +1,17 @@
import {stateReducer} from './stateReducer';

describe('Features', () => {
  const state = {
    features: [
      'low', 'high', 'back', 'rounded', 'sonorant',
      'nasal', 'obstruent', 'occlusive', 'plosive',
      'prenasalized', 'aspirated', 'coronal', 'anterior'
    ]
  };

  it('features returned unaltered', () => {
    const action = {type: ''};
    expect(stateReducer(state, action)).toBe(state);
  });

});
src/reducers/stateReducer.js
@@ -10,6 +10,20 @@ const stateReducer = (state, action) => {
      return initState();
    }

    case 'ADD_LEXEME': {
      let newLexeme = action.value;
      if (!newLexeme.epoch) newLexeme.epoch = state.epochs[0].name;
      return {...state, lexicon: [...state.lexicon, newLexeme]}
    }

    case 'SET_LEXICON': {
      let newLexicon = action.value;
      newLexicon = newLexicon.map(lexeme => lexeme.epoch
        ? lexeme
        : {...lexeme, epoch: state.epochs[0].name});
      return {...state, lexicon: newLexicon}
    }

    default:
      return state;
  }
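Not part of the diff — a minimal usage sketch of the two new actions, calling the reducer directly the way the tests below do. The initial state shape here is assumed from the test fixtures; only the action types and value shapes come from the reducer above.

import {stateReducer} from './stateReducer';

// Assumed minimal state, mirroring the test fixtures.
const state = {
  lexicon: [{lexeme: 'anta', epoch: 'epoch 1'}],
  epochs: [{name: 'epoch 1'}]
};

// ADD_LEXEME appends one lexeme; a missing epoch falls back to the first epoch's name.
const added = stateReducer(state, {type: 'ADD_LEXEME', value: {lexeme: 'tan'}});
// added.lexicon -> [{lexeme:'anta', epoch:'epoch 1'}, {lexeme:'tan', epoch:'epoch 1'}]

// SET_LEXICON replaces the whole lexicon; entries without an epoch get the default.
const replaced = stateReducer(state, {
  type: 'SET_LEXICON',
  value: [{lexeme: 'anat'}, {lexeme: 'anət', epoch: 'epoch 2'}]
});
// replaced.lexicon -> [{lexeme:'anat', epoch:'epoch 1'}, {lexeme:'anət', epoch:'epoch 2'}]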
58  src/reducers/stateReducer.lexicon.test.js  Normal file
@@ -0,0 +1,58 @@
import {stateReducer} from './stateReducer';

describe('Lexicon', () => {
  const state = {
    lexicon: [
      {lexeme:'anta', epoch:'epoch 1'},
      {lexeme:'anat', epoch:'epoch 1'},
      {lexeme:'anət', epoch:'epoch 1'},
      {lexeme:'anna', epoch:'epoch 1'},
      {lexeme:'tan', epoch:'epoch 1'},
      {lexeme:'ənta', epoch:'epoch 1'}
    ],
    epochs: [{name: 'epoch 1'}]
  };

  it('lexicon returned unaltered', () => {
    const action = {type: ''};
    expect(stateReducer(state, action)).toBe(state);
  });

  it('lexicon addition without epoch returns updated lexicon with default epoch', () => {
    const action = {type: 'ADD_LEXEME', value: {lexeme:'ntʰa'}}
    expect(stateReducer(state, action)).toEqual({...state, lexicon:[...state.lexicon, {lexeme:'ntʰa', epoch:'epoch 1'}]});
  });

  it('lexicon addition with epoch returns updated lexicon with correct epoch', () => {
    const action = {type: 'ADD_LEXEME', value: {lexeme:'ntʰa', epoch: 'epoch 2'}}
    expect(stateReducer(state, action)).toEqual({...state, lexicon:[...state.lexicon, action.value]});
  });

  it('lexicon set returns updated lexicon with correct epoch', () => {
    const newLexicon = [
      {lexeme:'anta', epoch:'epoch 1'},
      {lexeme:'anat', epoch:'epoch 1'},
      {lexeme:'anət', epoch:'epoch 1'},
      {lexeme:'anna', epoch:'epoch 1'}
    ]
    const action = {type: 'SET_LEXICON', value: newLexicon}
    expect(stateReducer(state, action)).toEqual({...state, lexicon:newLexicon});
  });

  it('lexicon set with no epoch returns updated lexicon with default epoch', () => {
    const newLexicon = [
      {lexeme:'anta', epoch:'epoch 1'},
      {lexeme:'anat', epoch:'epoch 1'},
      {lexeme:'anət', epoch:'epoch 2'},
      {lexeme:'anna', epoch:'epoch 1'}
    ]
    const inputLexicon = [
      {lexeme:'anta'},
      {lexeme:'anat'},
      {lexeme:'anət', epoch:'epoch 2'},
      {lexeme:'anna'}
    ]
    const action = {type: 'SET_LEXICON', value: inputLexicon}
    expect(stateReducer(state, action)).toEqual({...state, lexicon:newLexicon});
  })
});
@@ -0,0 +1,8 @@
import {stateReducer} from './stateReducer';

it('default returns state unaltered', () => {
  const state = {data: 'example'};
  const action = {type: ''};
  expect(stateReducer(state, action)).toBe(state);
});
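Not part of the diff — assuming Jest is the configured test runner (the describe/it/expect style and the .test.js naming suggest it, but the project config is not shown here), the new suites can be run with, for example:

npx jest src/reducers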