@griffin-stewie
Created October 11, 2021 22:12
A CSS tokenizer implemented in Swift. The original Rust version is from https://www.youtube.com/watch?v=8Y0-lrFf3Qk
import Foundation

/// A minimal CSS tokenizer that walks its input one character at a time.
public struct CSSTokenizer {
    /// Current read position within `input`.
    var position: String.Index
    let input: String

    public init(input: String) {
        self.position = input.startIndex
        self.input = input
    }
}
private extension CSSTokenizer {
    /// Consumes an identifier starting at the current position and returns it
    /// as an `.ident` token. On return, `position` sits on the last character
    /// of the name, so `next()`'s deferred advance moves past it. For example,
    /// with the cursor on the `h` of `h1{`, this consumes `h1` and leaves the
    /// cursor on `1`.
    mutating func consumeName() -> Token {
        func check(_ char: Character) -> Bool {
            switch char {
            case "a"..."z", "A"..."Z", "0"..."9", "-", "_":
                return true
            default:
                return false
            }
        }

        let start: String.Index = position
        var end: String.Index = position
        while true {
            let cursor = input.index(after: position)
            // Stop before running off the end of the input: the original
            // subscripted `cursor` unconditionally, which traps when a name
            // ends at the last character of the string.
            guard cursor < input.endIndex, check(input[cursor]) else {
                break
            }
            position = cursor
            end = position
        }
        return .ident(String(input[start...end]))
    }
}
extension CSSTokenizer: IteratorProtocol {
    public typealias Element = Token

    public mutating func next() -> Self.Element? {
        guard position < input.endIndex else {
            return nil
        }
        // Advance past the current character once the token has been produced.
        defer {
            position = input.index(after: position)
        }
        let c = input[position]
        switch c {
        case ":":
            return .colon
        case ";":
            return .semiColon
        case "{":
            return .openCurly
        case "}":
            return .closeCurly
        case "a"..."z", "A"..."Z":
            return consumeName()
        default:
            fatalError("Unexpected character: \(c)")
        }
    }
}
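Because CSSTokenizer is its own iterator, an empty Sequence conformance would get makeIterator() for free and allow for-in iteration over the tokens. This extension is not part of the original gist, only a sketch of one possible addition:

// Hypothetical extension: Swift supplies makeIterator() automatically when
// a Sequence's Iterator type is the sequence itself.
extension CSSTokenizer: Sequence {}

// for token in CSSTokenizer(input: "h1{color:red;}") { print(token) }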
public enum Token {
    case colon
    case semiColon
    case openCurly
    case closeCurly
    case ident(String)
    case eof // declared for completeness; never emitted by this tokenizer
}

extension Token: Equatable {}
let input = "h1{background-color:red;}"
var tokenizer = CSSTokenizer(input: input)

print("--------------------")
print("style: \(input)")
print("--------------------")

var tokens: [Token] = []
while let token = tokenizer.next() {
    print("token \(token)")
    tokens.append(token)
}
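For the sample input, the loop prints seven tokens: ident("h1"), openCurly, ident("background-color"), colon, ident("red"), semiColon, closeCurly. A quick sanity check against that stream, using the Equatable conformance above (a small addition, not in the original gist):

// Expected token stream for "h1{background-color:red;}".
let expected: [Token] = [
    .ident("h1"), .openCurly,
    .ident("background-color"), .colon, .ident("red"),
    .semiColon, .closeCurly
]
assert(tokens == expected)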