Skip to content

Instantly share code, notes, and snippets.

@lukekrikorian
Last active July 6, 2021 22:46
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save lukekrikorian/6ab1d186a5dcd26165eebdf5ad25222e to your computer and use it in GitHub Desktop.
Calculator written in Swift
import Foundation
/// A token stream.
typealias Tokens = [Token]
/// Numeric type used throughout the calculator (double precision).
typealias Num = Double
/// A single Unicode scalar — the lexer works scalar-by-scalar.
typealias Char = Unicode.Scalar
/// Reduction applied by an operator; unary operators ignore the second argument.
typealias Reducer = (Num, Num) -> Num
/// An operator: its symbol, fixity, precedence, and the function applied
/// during evaluation. Equality compares symbol and fixity only (the
/// closure `fn` cannot be compared).
struct Redux: Equatable {
    let kind: Kind
    let precedence: Int
    let scalar: Char
    let fn: Reducer

    /// Fixity of an operator relative to its operand(s).
    enum Kind {
        case prefix, infix, postfix
    }

    /// Lookup table: fixity -> symbol -> (precedence, reducer).
    /// Unary reducers ignore their second argument.
    static let key: [Kind: [Char: (Int, Reducer)]] = [
        .infix: [
            "+": (1, +),
            "-": (1, -),
            "/": (2, /),
            "*": (2, *),
            "^": (3, { base, exponent in pow(base, exponent) }),
            "%": (3, { lhs, rhs in lhs.truncatingRemainder(dividingBy: rhs) }),
        ],
        .prefix: [
            "-": (3, { operand, _ in -operand }),
            "+": (3, { operand, _ in +operand }),
            "√": (3, { operand, _ in sqrt(operand) }),
        ],
        .postfix: [
            "!": (3, { operand, _ in
                // Factorial over the integer part; defined as 1 for x <= 1.
                guard operand > 1 else { return 1 }
                var product: Num = 1
                for factor in 2...Int(operand) {
                    product *= Num(factor)
                }
                return product
            })
        ],
    ]

    /// Convenience access to one fixity's symbol table.
    static subscript(_ fixity: Kind) -> [Char: (Int, Reducer)] { Self.key[fixity]! }

    init(_ symbol: Char, _ fixity: Kind, _ level: Int = 3, _ reduce: @escaping Reducer) {
        scalar = symbol
        kind = fixity
        precedence = level
        fn = reduce
    }

    /// Fails when `scalar` is not a known operator of the given fixity.
    init?(_ kind: Kind, _ scalar: Char) {
        if let (level, reduce) = Self.key[kind]?[scalar] {
            self.init(scalar, kind, level, reduce)
        } else {
            return nil
        }
    }

    /// Wraps the operator as a token, or `.illegal` for an unknown symbol.
    static func token(_ kind: Kind, _ scalar: Char) -> Token {
        guard let redux = Self(kind, scalar) else {
            return .illegal("unknown scalar \(scalar)")
        }
        return redux.token
    }

    var token: Token { .reducer(self) }

    static func == (lhs: Redux, rhs: Redux) -> Bool {
        lhs.scalar == rhs.scalar && lhs.kind == rhs.kind
    }
}
/// One lexical unit of an expression: a number, an operator, a
/// parenthesized group, or an unrecognized scrap of input.
enum Token: Equatable {
    case group([Token]), int(Num), illegal(String), reducer(Redux)

    /// Human-readable rendering, used by the driver's diagnostics.
    var str: String {
        switch self {
        case .int(let value):
            return String(value)
        case .reducer(let op):
            return op.scalar.description
        case .group(let inner):
            let body = inner.map { $0.str }.joined(separator: " ")
            return "(" + body + ")"
        case .illegal(let message):
            return "!\(message)!"
        }
    }
}
/// Streaming lexer: turns an input string into `Token`s one `parse()` call
/// at a time. It tracks the previously produced token so it can tell
/// prefix from infix uses of "+" and "-".
class Tokenizer {
    private let input: String
    private var index: String.Index  // position of `char` within `input`
    private var char: Char           // scalar currently under the cursor
    // Seeded with a prefix operator so a leading "-"/"+" lexes as prefix.
    private var token: Token = Redux.token(.prefix, "+")

    /// Fails on an empty string (there would be no scalar to start on).
    init?(_ input: String) {
        guard !input.isEmpty else {
            return nil
        }
        self.input = input
        self.index = input.startIndex
        self.char = input.unicodeScalars[index]
    }

    /// Advances the cursor one scalar. At end of input `next` keeps
    /// returning the "\0" sentinel, so repeated reads are harmless.
    func read() {
        (index, char) = next
    }

    /// Peeks at the following scalar without consuming it.
    /// Returns the current index paired with "\0" once the end is reached.
    var next: (index: String.Index, scalar: Char) {
        let peek = input.index(after: index)
        guard peek < input.endIndex else {
            return (index, "\0")
        }
        return (peek, input.unicodeScalars[peek])
    }

    /// Lexes the entire input eagerly.
    var tokens: Tokens {
        var tokens = [Token]()
        while let token = parse() {
            tokens.append(token)
        }
        return tokens
    }

    /// Consumes a run of decimal digits starting at `char`.
    /// Returns `.illegal` if the collected text fails numeric conversion.
    func parseInt() -> Token {
        var str = String(char)
        while CharacterSet.decimalDigits.contains(next.scalar) {
            str.append(next.scalar.description)
            read()
        }
        guard let int = Num(str) else {
            return .illegal(str)
        }
        return .int(int)
    }

    /// Consumes a parenthesized subexpression; the opening "(" is `char`
    /// on entry. Relies on `parse()` advancing past each token so `char`
    /// can then be checked for the closing ")".
    func parseGroup() -> Token {
        var tokens = [Token]()
        read()
        while let token = parse() {
            tokens.append(token)
            if char == ")" {
                return .group(tokens)
            }
        }
        return .illegal("unclosed group")
    }

    /// Produces the next token, or nil at end of input.
    /// NOTE: the prefix table is checked first, so "+"/"-" (present in
    /// both tables) are disambiguated by the previous token: after an
    /// operator they are prefix, otherwise infix.
    func parse() -> Token? {
        while CharacterSet.whitespaces.contains(char) { read() }
        switch char {
        case _ where Redux[.prefix][char] != nil:
            switch token {
            case .reducer:
                token = Redux.token(.prefix, char)
            default:
                token = Redux.token(.infix, char)
            }
        case _ where Redux[.postfix][char] != nil:
            token = Redux.token(.postfix, char)
        case _ where Redux[.infix][char] != nil:
            token = Redux.token(.infix, char)
        case "\0":
            return nil
        case "(":
            token = parseGroup()
        case _ where CharacterSet.decimalDigits.contains(char):
            token = parseInt()
        default:
            print("Unhandled character \(char) in \(input)")
            token = .illegal(char.description)
        }
        read()
        return token
    }
}
/// Reorders an infix token stream into postfix (RPN) with a shunting-yard
/// pass. Groups are flattened recursively; illegal tokens are dropped.
func shunt(_ tokens: Tokens) -> Tokens {
    var queue = Tokens()     // output, in postfix order
    var pending = [Redux]()  // operator stack (top = last element)
    for token in tokens {
        switch token {
        case .int:
            queue.append(token)
        case .group(let inner):
            queue.append(contentsOf: shunt(inner))
        case .reducer(let op):
            switch op.kind {
            case .postfix:
                // Postfix binds immediately to what precedes it.
                queue.append(token)
            case .prefix:
                pending.append(op)
            case .infix:
                // Pop operators of equal or higher precedence first.
                while let top = pending.last, top.precedence >= op.precedence {
                    pending.removeLast()
                    queue.append(top.token)
                }
                pending.append(op)
            }
        case .illegal:
            break
        }
    }
    // Flush remaining operators, most recently pushed first.
    while let op = pending.popLast() {
        queue.append(op.token)
    }
    return queue
}
/// Evaluates a postfix (RPN) token stream produced by `shunt`.
///
/// Fixes over the original: iterates over the tokens instead of repeated
/// `removeFirst()` (which made the loop O(n²)), and guards against stack
/// underflow so malformed streams (e.g. a lone operator) yield 0 instead
/// of crashing on `popLast()!` / `removeLast(2)`.
///
/// - Parameter tokens: tokens in postfix order. `.group` never survives
///   `shunt` (it is flattened) and `.illegal` carries no value; both are
///   ignored here.
/// - Returns: the computed value, or 0 for an empty or invalid stream.
func eval(_ tokens: Tokens) -> Num {
    var stack = [Num]()
    for token in tokens {
        switch token {
        case .int(let value):
            stack.append(value)
        case .reducer(let op):
            switch op.kind {
            case .prefix, .postfix:
                // Unary: the reducer ignores its second argument.
                guard let operand = stack.popLast() else { continue }
                stack.append(op.fn(operand, 0))
            case .infix:
                // Underflow check before popping, so the stack is left
                // untouched when the stream is malformed.
                guard stack.count >= 2 else { continue }
                let rhs = stack.removeLast()
                let lhs = stack.removeLast()
                stack.append(op.fn(lhs, rhs))
            }
        case .illegal, .group:
            break
        }
    }
    // A well-formed stream leaves exactly one value on the stack.
    return stack.first ?? 0
}
/// Expression -> expected value, exercised by the driver loop at the
/// bottom of the file. (Idiom: `[String: Num]` shorthand rather than
/// `Dictionary<String, Num>`, and `let` since it is never mutated.)
let tests: [String: Num] = [
    "1 + 1": 2,
    "3 * 4": 12,
    "(10 * 10) / 4": 25,
    "(10 / (10)) + 4": 5,
    "3 * 4 / 2": 6,
    "10 + 5 * 3 / 5 + (12 / 4 + 7 / 1)": 23,
    "(12 / 4 + 7 / 1)": 10,
    "4 / 2 + 1": 3,
    "((1)) + 1": 2,
    "1 + 2 / 6 - 8 / 6 + (10 / 4) * 8 * 8": 160,
    "2 ^ 1": 2,
    "2 ^ 2": 4,
    "2 ^ (8 / 2)": 16,
    "3 - 2": 1,
    "3 - -2": 5,
    "-2": -2,
    "+3": 3,
    "√4": 2,
    "√144": 12,
    "√(12 * 12)": 12,
    "10 % 2": 0,
    "10 % 3": 1,
    "---3": -3,
    "3!": 6,
    "5!": 120,
    "1!": 1,
]
// Run every test case; print a pass line or a FAILED diagnostic showing
// the lexed tokens and the shunted (postfix) form.
for (test, result) in tests {
    guard let tokens = Tokenizer(test)?.tokens else { continue }
    let shunts = shunt(tokens)
    let output = eval(shunts)
    if output == result {
        print("\(test) = \(result)")
    } else {
        let postfix = shunts.map { $0.str }.joined(separator: " ")
        print("\(test) is \(tokens.map { $0.str }) is \(postfix)")
        print("\(output) [FAILED] should be \(result)")
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment