Dunno yet
import { expect } from 'chai'

// List helpers: non-destructive first / rest / last / all-but-last.
const head = (arr = []) => arr[0]
const tail = (arr = []) => arr.slice(1)
const last = (arr = []) => arr.slice(0).pop()
const init = (arr = []) => arr.slice(0, arr.length - 1)
// Drops tokens until `depth` closing parens have been consumed,
// returning whatever follows the current scope.
const nextInScope = (tokens, depth = 1, closeCount = 0) => {
  if (depth === closeCount) {
    return tokens
  }
  if (head(tokens) === ')') {
    return nextInScope(tail(tokens), depth, closeCount + 1)
  } else {
    return nextInScope(tail(tokens), depth, closeCount)
  }
}
describe('nextInScope()', () => {
  it('returns the tokens after the correct number of \')\' have been encountered', () => {
    expect(nextInScope([')', ')', '1', '2'], 2)).to.eql(['1', '2'])
    expect(nextInScope([')', '1', ')', '2'], 2)).to.eql(['2'])
  })
})
// Folds an infix token list into prefix sub-expressions for the given
// operators, wrapping the last operand pushed so far to keep left association.
const infixToPrefix = (operators = [], branch = [], expression = []) => {
  if (branch.length === 0) {
    return expression
  }
  if (operators.indexOf(head(branch)) !== -1) {
    const prefixExp = [head(branch), last(expression), head(tail(branch))]
    return infixToPrefix(operators, tail(tail(branch)), [...init(expression), prefixExp])
  } else {
    return infixToPrefix(operators, tail(branch), [...expression, head(branch)])
  }
}
describe('infixToPrefix()', () => {
  it('arranges a list of tokens with the operators first and operands last', () => {
    expect(infixToPrefix(['*'], ['1', '*', '2'])).to.eql([['*', '1', '2']])
  })
  it('maintains left association', () => {
    expect(infixToPrefix(['*'], ['1', '*', '2', '*', '3'])).to.eql([['*', ['*', '1', '2'], '3']])
    expect(infixToPrefix(['-'], ['1', '-', '2', '-', '3'])).to.eql([['-', ['-', '1', '2'], '3']])
  })
  it('only applies to the provided operators', () => {
    expect(infixToPrefix(['*'], ['1', '*', '2', '+', '3'])).to.eql([['*', '1', '2'], '+', '3'])
    expect(infixToPrefix(['*'], ['3', '+', '1', '*', '1', '+', '2'])).to.eql(['3', '+', ['*', '1', '1'], '+', '2'])
    expect(infixToPrefix(['+'], infixToPrefix(['*'], ['1', '*', '2', '+', '3']))).to.eql([['+', ['*', '1', '2'], '3']])
  })
})
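A sketch of the same left fold on a longer chain (an extra assertion, not part of the suite above): each new operator wraps whatever expression has already been built, so nesting keeps accumulating to the left.

describe('infixToPrefix() left fold (sketch)', () => {
  it('keeps nesting earlier results to the left as the chain grows', () => {
    expect(infixToPrefix(['-'], ['1', '-', '2', '-', '3', '-', '4'])).to.eql([['-', ['-', ['-', '1', '2'], '3'], '4']])
  })
})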
// Builds a nested prefix tree: '(' opens a sub-branch, ')' (or running out of
// tokens) closes the current one, and operator precedence is applied by
// running infixToPrefix() for '*' and '/' before '+' and '-'.
const createAst = (tokens, depth = 1, ast = []) => {
  if (tokens.length === 0 || head(tokens) === ')') {
    return infixToPrefix(['+', '-'], infixToPrefix(['*', '/'], ast))
  }
  if (head(tokens) === '(') {
    const branch = createAst(tail(tokens), depth + 1)
    const nextTokens = nextInScope(tokens.slice(branch.length + 1), depth)
    return [...ast, branch, ...createAst(nextTokens, depth)]
  } else {
    return createAst(tail(tokens), depth, [...ast, head(tokens)])
  }
}
describe('createAst()', () => {
  it('takes a list of tokens and returns nested scoped expressions', () => {
    expect(createAst(['1', '+', '2'])).to.eql([['+', '1', '2']])
    expect(createAst(['(', '1', '+', '2', ')'])).to.eql([[['+', '1', '2']]])
    expect(createAst(['(', '1', ')', '(', '2', ')'])).to.eql([['1'], ['2']])
    expect(createAst(['(', '1', '(', '2', '(', '3', ')', ')', ')'])).to.eql([['1', ['2', ['3']]]])
    expect(createAst(['(', '1', '(', '2', '(', '3', ')', ')', '1', ')'])).to.eql([['1', ['2', ['3']], '1']])
  })
  it('creates new scopes for operators, respecting higher precedence', () => {
    expect(createAst(['1', '*', '1'])).to.eql([['*', '1', '1']])
    expect(createAst(['1', '*', '1', '+', '2'])).to.eql([['+', ['*', '1', '1'], '2']])
    expect(createAst(['3', '+', '1', '*', '1', '+', '2'])).to.eql([['+', ['+', '3', ['*', '1', '1']], '2']])
    expect(createAst(['3', '+', '1', '/', '1', '+', '2'])).to.eql([['+', ['+', '3', ['/', '1', '1']], '2']])
  })
})
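The base case of createAst() produces those trees by running infixToPrefix() twice, higher-precedence operators first. A sketch of that layering on the same tokens as the last tests (an extra assertion, mirroring what the base case does):

describe('layered infixToPrefix() passes (sketch)', () => {
  it('reproduces the createAst() tree for the same tokens', () => {
    expect(
      infixToPrefix(['+', '-'], infixToPrefix(['*', '/'], ['3', '+', '1', '*', '1', '+', '2']))
    ).to.eql([['+', ['+', '3', ['*', '1', '1']], '2']])
  })
})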
// Numeric token predicates. The dot in isFloat is escaped so it only matches
// a literal decimal point.
const isDigit = (str = '') => str.match(/\d/)
const isInt = (str = '') => str.match(/^\d+$/)
const isFloat = (str = '') => str.match(/^\d+\.?(\d+)?$/)

// Splits a string into tokens, gluing digits and decimal points onto the
// preceding numeric token and dropping whitespace.
const tokenize = (str) => {
  return str.split('').reduce((tokens, character) => {
    const lastToken = last(tokens)
    if (isDigit(character) && isFloat(lastToken)) {
      return [...init(tokens), (lastToken + character)]
    } else if (character === '.' && isInt(lastToken)) {
      return [...init(tokens), (lastToken + character)]
    } else {
      return [...tokens, character]
    }
  }, []).filter((token) => token !== ' ')
}
describe('tokenize()', () => {
  it('turns a string into an array of tokens', () => {
    expect(tokenize('1 2')).to.eql(['1', '2'])
    expect(tokenize('1.1 2')).to.eql(['1.1', '2'])
    expect(tokenize('1.1.1 2')).to.eql(['1.1', '.', '1', '2'])
    expect(tokenize('1. 2')).to.eql(['1.', '2'])
    expect(tokenize('1.3 + 2')).to.eql(['1.3', '+', '2'])
    expect(tokenize('103456.3 + 2')).to.eql(['103456.3', '+', '2'])
    expect(tokenize('(1.3 + 2) + (4 / 5)')).to.eql(['(', '1.3', '+', '2', ')', '+', '(', '4', '/', '5', ')'])
  })
})
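A rough end-to-end sketch (not asserted by the gist's own suite): the output of tokenize() feeds straight into createAst(), and the expected trees follow from the behaviour shown above.

describe('createAst(tokenize()) (sketch)', () => {
  it('parses a source string into a prefix tree', () => {
    expect(createAst(tokenize('3 + 1 * 2'))).to.eql([['+', '3', ['*', '1', '2']]])
    expect(createAst(tokenize('(1.3 + 2)'))).to.eql([[['+', '1.3', '2']]])
  })
})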