@TooManyBees
Last active February 9, 2019 20:36
simple lispy interpreter

use std::mem;
use std::io::{self, Read};
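
// A Token is either a bare `(` marker waiting on the stack, a keyword
// (any run of non-paren, non-whitespace characters), or a completed
// parenthesized expression.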
#[derive(Debug, PartialEq, Eq)]
enum Token {
    Paren,
    Keyword(String),
    Expression(Vec<Token>),
}
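
// Parse the whole input in one pass. Characters accumulate into `value`
// until a delimiter flushes them as a Keyword. An open paren pushes a
// Paren marker onto the token stack; a close paren collapses everything
// back to the nearest marker into a single Expression.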
fn tokenize(input: &str) -> Token {
    let mut value = String::new();
    let mut tokens: Vec<Token> = Vec::new();
    for c in input.chars() {
        match c {
            '(' => {
                maybe_add_token(&mut value, &mut tokens);
                tokens.push(Token::Paren)
            },
            ')' => {
                maybe_add_token(&mut value, &mut tokens);
                let expr = pop_expression(&mut tokens);
                tokens.push(expr);
            },
            // Treat any whitespace (not just ' ') as a delimiter, so that
            // newlines and tabs in piped input don't end up inside keywords.
            c if c.is_whitespace() => maybe_add_token(&mut value, &mut tokens),
            _ => value.push(c),
        }
    }
    // We should have continuously reduced the vector of tokens down to a
    // single expression. If not, it's bad input.
    assert!(
        tokens.len() == 1,
        "Input string should have reduced to a single expression, found {} instead",
        tokens.len()
    );
    tokens.pop().unwrap()
}

// Add the currently-parsed token to the list of parsed tokens, but only
// if it's not empty.
fn maybe_add_token(token: &mut String, tokens: &mut Vec<Token>) {
    if !token.is_empty() {
        let token_string = mem::replace(token, String::new());
        tokens.push(Token::Keyword(token_string));
    }
}

// Slice off tokens between the last open paren and the end of the token list.
// This is called when a closing paren is encountered, so if we never find a
// matching open paren, it's bad input.
fn pop_expression(tokens: &mut Vec<Token>) -> Token {
    let mut result = Vec::new();
    while let Some(t) = tokens.pop() {
        match t {
            Token::Paren => {
                // Tokens were popped newest-first, so restore source order.
                result.reverse();
                return Token::Expression(result);
            },
            _ => result.push(t),
        }
    }
    panic!("Unmatched parens (missing open paren)");
}

fn main() -> io::Result<()> {
    let mut buffer = String::new();
    io::stdin().read_to_string(&mut buffer)?;
    let ast = tokenize(&buffer);
    println!("{:?}", ast);
    Ok(())
}

#[cfg(test)]
mod test {
    use super::Token::*;
    use super::tokenize;

    #[test]
    fn test_tokenize() {
        let input = "(first (list 1 (+ 2 3) 9))";
        let result = tokenize(&input);
        let expected = Expression(vec![
            Keyword("first".into()),
            Expression(vec![
                Keyword("list".into()),
                Keyword("1".into()),
                Expression(vec![
                    Keyword("+".into()),
                    Keyword("2".into()),
                    Keyword("3".into()),
                ]),
                Keyword("9".into()),
            ]),
        ]);
        assert_eq!(expected, result);
    }

    #[test]
    #[should_panic]
    fn test_missing_leading_paren() {
        let input = "first (list 1 2 3))";
        tokenize(&input);
    }

    #[test]
    #[should_panic]
    fn test_extra_paren() {
        let input = "(first (list 1 2 3)))";
        tokenize(&input);
    }

    #[test]
    #[should_panic]
    fn test_missing_trailing_paren() {
        let input = "(first (list 1 2 3)";
        tokenize(&input);
    }
}
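
Example run (a sketch, assuming the file is saved as main.rs and built with rustc; any balanced s-expression on stdin works):

$ rustc main.rs
$ echo "(first (list 1 (+ 2 3) 9))" | ./main
Expression([Keyword("first"), Expression([Keyword("list"), Keyword("1"), Expression([Keyword("+"), Keyword("2"), Keyword("3")]), Keyword("9")])])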