Custom Query Parser for Game of Thrones Wiki using Ogma
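A query consists of free-text keywords followed by an optional filter, e.g. `arya stark #house = Stark or #house = Tully`: two keywords plus a filter that combines two equality checks with `or`. The Swift source below defines the data model, the lexer, and the parser for this grammar.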
import Ogma

// MARK: - Structure

struct Query {
    let keywords: [Keyword]
    let filter: Filter?
}

extension Query {
    /// A typesafe wrapper, modeling a Keyword
    struct Keyword: RawRepresentable {
        let rawValue: String
    }
}
extension Query {
    indirect enum Filter {
        case equality(Equality)
        case operation(BinaryOperation<Query.Filter>)
    }
}

extension Query.Filter {
    /// A typesafe wrapper, modeling a Property from a Wiki Article
    /// e.g.: #house
    struct Property: RawRepresentable {
        let rawValue: String
    }

    /// A typesafe wrapper modeling a value to compare
    /// e.g.: Stark
    struct Value: RawRepresentable {
        let rawValue: String
    }

    /// A check for equality
    /// e.g.: #house = Stark
    struct Equality {
        let property: Property
        let value: Value
    }
}
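// For illustration: a filter like `#house = Stark and #house = Tully` is modeled as
// .operation(...), where the BinaryOperation joins two .equality cases
// (property "house" with values "Stark" and "Tully") using the `and` operator defined below.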
// MARK: - Operators

extension Query.Filter: MemberOfBinaryOperation {
    // All operators between Filters
    enum Operator: Int, CaseIterable, Comparable {
        case and
        case or
    }

    init(from operation: BinaryOperation<Query.Filter>) {
        self = .operation(operation)
    }
}

extension Query.Filter.Operator: BinaryOperator {
    var token: Query.Token {
        switch self {
        case .and:
            return .and
        case .or:
            return .or
        }
    }
}
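// Note: the Comparable conformance is presumably what Ogma's BinaryOperator uses to rank
// operators against each other; with the ordering above, `and` sorts before `or`.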
// MARK: - Tokens & Lexer

extension Query {
    enum Token: TokenProtocol {
        case hash
        case equals
        case openParenthesis
        case closeParenthesis
        case word(String)
        case string(String)
    }
}

extension Query.Token {
    static let and = Query.Token.word("and")
    static let or = Query.Token.word("or")
}
extension Query {
    enum Lexer: GeneratorLexer {
        typealias Token = Query.Token

        // Generators that will be used to create Tokens
        static let generators: Generators = [
            // Map special characters
            RegexTokenGenerator(pattern: "\\(").map(to: .openParenthesis),
            RegexTokenGenerator(pattern: "\\)").map(to: .closeParenthesis),
            RegexTokenGenerator(pattern: "#").map(to: .hash),
            RegexTokenGenerator(pattern: "=").map(to: .equals),

            // Map words and String Literals
            RegexTokenGenerator(pattern: "\\w+\\b").map(Token.word),
            StringLiteralTokenGenerator().map(Token.string),

            // Ignore white spaces
            WhiteSpaceTokenGenerator().ignore(),
        ]
    }
}
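// For illustration, the lexer turns `#house = "Stark"` into roughly:
// [.hash, .word("house"), .equals, .string("Stark")]
// (whitespace is dropped; whether the string literal payload keeps its quotes depends on
// StringLiteralTokenGenerator, so the exact payload shown here is an assumption).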
// MARK: - Helper properties for Tokens

extension Query.Token {
    var word: String? {
        guard case .word(let word) = self else { return nil }
        return word
    }

    var stringLiteral: String? {
        guard case .string(let string) = self else { return nil }
        return string
    }
}
// MARK: - Parsing

extension Query.Keyword: Parsable {
    typealias Token = Query.Token

    static let parser: AnyParser<Query.Token, Query.Keyword> = AnyParser
        .consuming(keyPath: \.word)
        .map(Query.Keyword.init)
}

extension Query.Filter.Property: Parsable {
    typealias Token = Query.Token

    static let parser: AnyParser<Query.Token, Query.Filter.Property> =
        .hash &&
        AnyParser
            .consuming(keyPath: \.word)
            .map(Query.Filter.Property.init)
}

extension Query.Filter.Value: Parsable {
    typealias Token = Query.Token

    static let parser: AnyParser<Query.Token, Query.Filter.Value> =
        AnyParser.consuming(keyPath: \.word).map(Query.Filter.Value.init) ||
        AnyParser.consuming(keyPath: \.stringLiteral).map(Query.Filter.Value.init)
}

extension Query.Filter.Equality: Parsable {
    typealias Token = Query.Token

    static let parser: AnyParser<Query.Token, Query.Filter.Equality> = {
        let parser = Query.Filter.Property.self && .equals && Query.Filter.Value.self
        return parser.map { Query.Filter.Equality(property: $0, value: $1) }
    }()
}
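// For illustration: `#house = Stark` parses into
// Equality(property: Property(rawValue: "house"), value: Value(rawValue: "Stark")),
// since the Property parser consumes the leading hash token itself.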
extension Query.Filter: Parsable {
    typealias Token = Query.Token

    static let parser: AnyParser<Query.Token, Query.Filter> =
        BinaryOperation.map(Query.Filter.operation) ||
        Query.Filter.Equality.map(Query.Filter.equality) ||
        Query.Filter.wrapped(by: .openParenthesis, and: .closeParenthesis)
}

extension Query: Parsable {
    static let parser: AnyParser<Query.Token, Query> = {
        let parser = Keyword.self* && Filter?.self
        return parser.map { Query(keywords: $0, filter: $1) }
    }()
}
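// MARK: - Example usage (sketch)

// A minimal sketch of running lexer and parser end to end. The exact convenience API is
// an assumption — Ogma ties a Parsable type to a Lexer, but the entry point used below
// (`parse(_:using:)`) may be named differently in the version of Ogma you are using.
//
// let query = try Query.parse("arya stark #house = Stark or #house = Tully",
//                             using: Query.Lexer.self)
// print(query.keywords.map { $0.rawValue })  // ["arya", "stark"]
// print(query.filter)                        // .operation joining two .equality filters with `or`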