diff --git a/src/reducers/reducer.results.js b/src/reducers/reducer.results.js
index 53366c7..ebce6b2 100644
--- a/src/reducers/reducer.results.js
+++ b/src/reducers/reducer.results.js
@@ -127,44 +127,66 @@ export const decomposeRules = (epoch: epochType, phones: {[key: string]: phoneTy
   return featureBundle;
 }
 
+const isPhonemeBoundByRule = (phonemeFeatures, ruleFeatures) => {
+  if (!ruleFeatures) return true;
+  const match = ruleFeatures.filter((ruleFeature, index) => {
+    const phoneme = phonemeFeatures[index].features;
+    return Object.entries(ruleFeature).reduce((bool, entry) => {
+      if (!bool) return false;
+      if (!phoneme[entry[0]] && !entry[1]) return true;
+      if (phoneme[entry[0]] !== entry[1]) return false;
+      return true;
+    }, true);
+  })
+  return match.length === ruleFeatures.length ? true : false;
+}
+
+const swapPhoneme = (phoneme, newFeatures, features) => {
+  const newPhonemeFeatures = Object.entries(newFeatures).reduce((newPhoneme, [newFeature, newValue]) => {
+    return { ...newPhoneme, [newFeature]: newValue }
+  }, {...phoneme.features})
+  const newPhonemeCandidates = Object.entries(newPhonemeFeatures).map(([newFeature, newValue]) => {
+    return features[newFeature][newValue ? 'positive': 'negative']
+  })
+  const newPhoneme = newPhonemeCandidates.reduce((candidates, value, index, array) => {
+    return candidates.filter(candidate => value.map(val => val.grapheme).includes(candidate.grapheme))
+  }, newPhonemeCandidates[newPhonemeCandidates.length - 1])
+  return newPhoneme[0];
+}
+
+export const transformLexeme = (lexemeBundle, rule, features) => {
+  const {pre, post, position} = rule.environment;
+  const newLexeme = lexemeBundle.reduce((newLexeme, phoneme, index) => {
+    if ( index < pre.length || index >= lexemeBundle.length - post.length ) return [...newLexeme, phoneme];
+    if (!isPhonemeBoundByRule(lexemeBundle.slice(index - pre.length, index), pre)) return [...newLexeme, phoneme];
+    if (!isPhonemeBoundByRule([phoneme], rule.environment.position)) return [...newLexeme, phoneme];
+    if (!isPhonemeBoundByRule(lexemeBundle.slice(index, index + post.length), post)) return [...newLexeme, phoneme];
+    const newPhoneme = swapPhoneme(phoneme, rule.newFeatures[0], features);
+    return [...newLexeme, newPhoneme];
+  }, [])
+  return newLexeme;
+
+}
+
 export const run = (state: stateType, action: resultsAction): stateType => {
   // for each epoch
   // TODO iterate through each epoch
-  let ruleBundle = state.epochs[0].changes;
-
-  // for each rule in epoch
-  ruleBundle = ruleBundle.map(rule => decomposeRule(rule))
-    // parse rule into feature bundles for
-      // environment
-        // pre-target
-        // post-target
-      // target
-      // mutation
-  // for each item in lexicon
-    // match targets in environments
-      // mutate target
-      // temporarily store lexical item
-  // store lexical items in resulting epoch
-
-
-
+  const epoch = state.epochs[0];
+  const phones = state.phones;
+  const lexicon = state.lexicon;
+  const features = state.features;
+  const ruleBundle = decomposeRules(epoch, phones);
+  const lexiconBundle = lexicon.map(lexeme => findFeaturesFromLexeme(phones, lexeme.lexeme))
 
-  ruleBundle.map(rule => {
-    rule.forEach(position => {
-      console.log(position)
-    })
+  const results = lexiconBundle.map(lexemeBundle => {
+    return ruleBundle.reduce((lexeme, rule) => {
+      return transformLexeme(lexeme, rule, features);
+    }, lexemeBundle)
   })
-  let featurePhoneBundle = state.lexicon.map(lexeme => findFeaturesFromLexeme(state.phones, lexeme))
-
-  console.log(featurePhoneBundle)
-  ruleBundle.forEach(rule => {
-    featurePhoneBundle.map(featurePhone => {
-      // if (findRules(featurePhone, )
-    })
-})
-
-  let results = [];
-  return {...state, results: { pass: state.epochs[0].name, results } }
+  const stringifiedResults = results.map(lexemeBundle => {
+    return Object.entries(lexemeBundle).map(phoneme => phoneme[1].grapheme).join('')
+  })
+  return {...state, results: { pass: state.epochs[0].name, results: stringifiedResults } }
 }
\ No newline at end of file
diff --git a/src/reducers/reducer.results.test.js b/src/reducers/reducer.results.test.js
index 2a78d5c..c69b4d3 100644
--- a/src/reducers/reducer.results.test.js
+++ b/src/reducers/reducer.results.test.js
@@ -1,6 +1,6 @@
 import { stateReducer } from './reducer';
 import { initState } from './reducer.init';
-import { decomposeRules } from './reducer.results';
+import { decomposeRules, transformLexeme } from './reducer.results';
 
 describe('Results', () => {
   let state = {};
@@ -35,17 +35,66 @@ describe('Results', () => {
       }
     ];
     expect(decomposeRules(epoch, phones)).toStrictEqual(result);
+  });
+
+  it('expect transform lexeme to apply rule to lexeme', () => {
+    const lexemeBundle = [
+      {
+        grapheme: 'a',
+        features: {
+          sonorant: true,
+          back: true,
+          low: true,
+          high: false,
+          rounded: false
+        }
+      },
+      {
+        grapheme: 'n',
+        features: { sonorant: true, nasal: true, occlusive: true, coronal: true }
+      },
+      {
+        grapheme: 't',
+        features: { occlusive: true, coronal: true, obstruent: true }
+      },
+      {
+        grapheme: 'a',
+        features: {
+          sonorant: true,
+          back: true,
+          low: true,
+          high: false,
+          rounded: false
+        }
+      }
+    ]
+
+    const resultsLexeme = [...lexemeBundle]
+    resultsLexeme[2] = lexemeBundle[1]
+
+    const rule = {
+      environment: {
+        pre: [ { sonorant: true, nasal: true, occlusive: true, coronal: true } ],
+        position: [ { occlusive: true, nasal: false } ],
+        post: []
+      },
+      newFeatures: [ { occlusive: true, nasal: true } ]
+    }
+
+    expect(transformLexeme(lexemeBundle, rule, initState().features)).toEqual(resultsLexeme)
+  })
 
-    // it('results returned from first sound change rule', () => {
-    //   const action = {type: 'RUN'};
-    //   state = initState(0)
-    //   expect(stateReducer(state, action).results).toEqual({
-    //     pass: 'epoch 1',
-    //     results: [
-    //       'anna', 'anat', 'anət', 'anna', 'tan', 'ənna'
-    //     ]
-    //   })
-    // });
+
+  it('results returned from first sound change rule', () => {
+    const action = {type: 'RUN'};
+    state = initState(0)
+    expect(stateReducer(state, action).results).toEqual({
+      pass: 'epoch 1',
+      results: [
+        'anna', 'anat', 'anət', 'anna', 'tan', 'ənna'
+      ]
+    })
+  });
 });
\ No newline at end of file