Skip to content

Instantly share code, notes, and snippets.

@mrnugget
Last active July 18, 2017 16:19
  • Star 1 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
Star You must be signed in to star a gist
Save mrnugget/69af6e37a4983f07532f4b677e0a334b to your computer and use it in GitHub Desktop.
These changes add support for fat arrow functions to Monkey's parser. E.g. `(a, b) => { a + b }`. The current implementation expects that the body is a block statement, i.e. it needs curly braces.
diff --git src/monkey/ast/ast.go src/monkey/ast/ast.go
index fb30b05..70668a6 100644
--- src/monkey/ast/ast.go
+++ src/monkey/ast/ast.go
@@ -337,3 +337,28 @@ func (hl *HashLiteral) String() string {
return out.String()
}
+
+type FatArrowFunction struct {
+ Token token.Token // The first token after '(' — either the first parameter or ')'
+ Parameters []*Identifier
+ Body *BlockStatement
+}
+
+func (fl *FatArrowFunction) expressionNode() {}
+func (fl *FatArrowFunction) TokenLiteral() string { return fl.Token.Literal }
+func (fl *FatArrowFunction) String() string {
+ var out bytes.Buffer
+
+ params := []string{}
+ for _, p := range fl.Parameters {
+ params = append(params, p.String())
+ }
+
+ out.WriteString(fl.TokenLiteral())
+ out.WriteString("(")
+ out.WriteString(strings.Join(params, ", "))
+ out.WriteString(") ")
+ out.WriteString(fl.Body.String())
+
+ return out.String()
+}
diff --git src/monkey/lexer/lexer.go src/monkey/lexer/lexer.go
index db4f5f7..cbf974e 100644
--- src/monkey/lexer/lexer.go
+++ src/monkey/lexer/lexer.go
@@ -27,6 +27,11 @@ func (l *Lexer) NextToken() token.Token {
l.readChar()
literal := string(ch) + string(l.ch)
tok = token.Token{Type: token.EQ, Literal: literal}
+ } else if l.peekChar() == '>' {
+ ch := l.ch
+ l.readChar()
+ literal := string(ch) + string(l.ch)
+ tok = token.Token{Type: token.ARROW, Literal: literal}
} else {
tok = newToken(token.ASSIGN, l.ch)
}
diff --git src/monkey/lexer/lexer_test.go src/monkey/lexer/lexer_test.go
index e4e64a4..711bdf8 100644
--- src/monkey/lexer/lexer_test.go
+++ src/monkey/lexer/lexer_test.go
@@ -30,6 +30,7 @@ if (5 < 10) {
"foo bar"
[1, 2];
{"foo": "bar"}
+=>
`
tests := []struct {
@@ -122,6 +123,7 @@ if (5 < 10) {
{token.COLON, ":"},
{token.STRING, "bar"},
{token.RBRACE, "}"},
+ {token.ARROW, "=>"},
{token.EOF, ""},
}
diff --git src/monkey/parser/parser.go src/monkey/parser/parser.go
index 0a1f19c..35aca79 100644
--- src/monkey/parser/parser.go
+++ src/monkey/parser/parser.go
@@ -63,7 +63,7 @@ func New(l *lexer.Lexer) *Parser {
p.registerPrefix(token.MINUS, p.parsePrefixExpression)
p.registerPrefix(token.TRUE, p.parseBoolean)
p.registerPrefix(token.FALSE, p.parseBoolean)
- p.registerPrefix(token.LPAREN, p.parseGroupedExpression)
+ p.registerPrefix(token.LPAREN, p.parseGroupedExpressionOrFatArrowFunction)
p.registerPrefix(token.IF, p.parseIfExpression)
p.registerPrefix(token.FUNCTION, p.parseFunctionLiteral)
p.registerPrefix(token.LBRACKET, p.parseArrayLiteral)
@@ -295,9 +295,34 @@ func (p *Parser) parseBoolean() ast.Expression {
return &ast.Boolean{Token: p.curToken, Value: p.curTokenIs(token.TRUE)}
}
-func (p *Parser) parseGroupedExpression() ast.Expression {
+func (p *Parser) parseGroupedExpressionOrFatArrowFunction() ast.Expression {
p.nextToken()
+ // We're sitting on the first token after the `(`.
+ // If the current token is a `)` and the next `=>` then we're sitting on
+ // an empty params list for a fat arrow function
+ // If the current token is an identifier and the next a `,`, then we're
+ // sitting on a params list with two or more elements
+ // If the current token is an identifier and the next a `)`, then we're
+ // sitting on a params list with one element -> parse the fat arrow function.
+ // NOTE(review): a plain grouped identifier like `(a)` also matches this case and then fails at expectPeek(ARROW).
+ if (p.curTokenIs(token.RPAREN) && p.peekTokenIs(token.ARROW)) ||
+ (p.curTokenIs(token.IDENT) && p.peekTokenIs(token.COMMA)) ||
+ (p.curTokenIs(token.IDENT) && p.peekTokenIs(token.RPAREN)) {
+ fat := &ast.FatArrowFunction{Token: p.curToken}
+ fat.Parameters = p.parseFunctionParameters()
+
+ if !p.expectPeek(token.ARROW) {
+ return nil
+ }
+
+ p.nextToken()
+ fat.Body = p.parseBlockStatement()
+
+ return fat
+ }
+
+ // Or we're just sitting in a grouped expression
exp := p.parseExpression(LOWEST)
if !p.expectPeek(token.RPAREN) {
@@ -364,6 +389,8 @@ func (p *Parser) parseFunctionLiteral() ast.Expression {
return nil
}
+ p.nextToken()
+
lit.Parameters = p.parseFunctionParameters()
if !p.expectPeek(token.LBRACE) {
@@ -378,13 +405,10 @@ func (p *Parser) parseFunctionLiteral() ast.Expression {
func (p *Parser) parseFunctionParameters() []*ast.Identifier {
identifiers := []*ast.Identifier{}
- if p.peekTokenIs(token.RPAREN) {
- p.nextToken()
+ if p.curTokenIs(token.RPAREN) {
return identifiers
}
- p.nextToken()
-
ident := &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
identifiers = append(identifiers, ident)
diff --git src/monkey/parser/parser_test.go src/monkey/parser/parser_test.go
index 674487d..9a16b6b 100644
--- src/monkey/parser/parser_test.go
+++ src/monkey/parser/parser_test.go
@@ -936,6 +936,63 @@ func TestParsingHashLiteralsWithExpressions(t *testing.T) {
}
}
+func TestFatArrowFunctionParsing(t *testing.T) {
+ tests := []struct {
+ input string
+ expectedParams []string
+ }{
+ {"(a, b) => { a + b }", []string{"a", "b"}},
+ {"(a) => { a + b }", []string{"a"}},
+ {"() => { a + b }", []string{}},
+ }
+
+ for _, tt := range tests {
+ l := lexer.New(tt.input)
+ p := New(l)
+ program := p.ParseProgram()
+ checkParserErrors(t, p)
+
+ if len(program.Statements) != 1 {
+ t.Fatalf("program.Statements does not contain %d statements. got=%d\n",
+ 1, len(program.Statements))
+ }
+
+ stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
+ if !ok {
+ t.Fatalf("program.Statements[0] is not ast.ExpressionStatement. got=%T",
+ program.Statements[0])
+ }
+
+ function, ok := stmt.Expression.(*ast.FatArrowFunction)
+ if !ok {
+ t.Fatalf("stmt.Expression is not ast.FatArrowFunction. got=%T",
+ stmt.Expression)
+ }
+
+ if len(function.Parameters) != len(tt.expectedParams) {
+ t.Fatalf("function literal parameters wrong. want %d, got=%d\n",
+ len(tt.expectedParams), len(function.Parameters))
+ }
+
+ for i, p := range tt.expectedParams {
+ testLiteralExpression(t, function.Parameters[i], p)
+ }
+
+ if len(function.Body.Statements) != 1 {
+ t.Fatalf("function.Body.Statements has not 1 statements. got=%d\n",
+ len(function.Body.Statements))
+ }
+
+ bodyStmt, ok := function.Body.Statements[0].(*ast.ExpressionStatement)
+ if !ok {
+ t.Fatalf("function body stmt is not ast.ExpressionStatement. got=%T",
+ function.Body.Statements[0])
+ }
+
+ testInfixExpression(t, bodyStmt.Expression, "a", "+", "b")
+ }
+}
+
func testLetStatement(t *testing.T, s ast.Statement, name string) bool {
if s.TokenLiteral() != "let" {
t.Errorf("s.TokenLiteral not 'let'. got=%q", s.TokenLiteral())
diff --git src/monkey/token/token.go src/monkey/token/token.go
index 3d2d2f7..b2dad4e 100644
--- src/monkey/token/token.go
+++ src/monkey/token/token.go
@@ -25,6 +25,8 @@ const (
EQ = "=="
NOT_EQ = "!="
+ ARROW = "=>"
+
// Delimiters
COMMA = ","
SEMICOLON = ";"
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment