Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file modified pkg/darwin_amd64/monkey/ast.a
Binary file not shown.
26 changes: 26 additions & 0 deletions src/monkey/ast/ast.go
Original file line number Diff line number Diff line change
Expand Up @@ -259,3 +259,29 @@ func (fl *FunctionLiteral) String() string {

return out.String()
}

// CallExpression represents a function invocation such as add(1, 2 * 3).
type CallExpression struct {
	Token     token.Token  // the '(' token
	Function  Expression   // Identifier or FunctionLiteral
	Arguments []Expression // the expressions passed as arguments
}

// expressionNode marks CallExpression as an Expression node.
func (ce *CallExpression) expressionNode() {}

// TokenLiteral returns the literal text of the call's '(' token.
func (ce *CallExpression) TokenLiteral() string {
	return ce.Token.Literal
}
// String renders the call as "<function>(<arg1>, <arg2>, ...)".
func (ce *CallExpression) String() string {
	parts := make([]string, 0, len(ce.Arguments))
	for _, arg := range ce.Arguments {
		parts = append(parts, arg.String())
	}

	var b bytes.Buffer
	b.WriteString(ce.Function.String())
	b.WriteString("(")
	b.WriteString(strings.Join(parts, ", "))
	b.WriteString(")")

	return b.String()
}
115 changes: 79 additions & 36 deletions src/monkey/parser/parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ var precedences = map[token.TokenType]int{
token.MINUS: SUM,
token.SLASH: PRODUCT,
token.ASTERISK: PRODUCT,
token.LPAREN: CALL,
}

type (
Expand Down Expand Up @@ -76,6 +77,8 @@ func New(l *lexer.Lexer) *Parser {
p.registerInfix(token.LT, p.parseInfixExpression)
p.registerInfix(token.GT, p.parseInfixExpression)

p.registerInfix(token.LPAREN, p.parseCallExpression)

// 2つトークンを読み込む。
// curTokenとpeekTokenの両方がセットされる
p.nextToken()
Expand All @@ -84,11 +87,6 @@ func New(l *lexer.Lexer) *Parser {
return p
}

// nextToken advances the parser by one token: curToken takes the old
// peekToken, and peekToken is refilled from the lexer.
func (p *Parser) nextToken() {
	p.curToken = p.peekToken
	p.peekToken = p.l.NextToken()
}

func (p *Parser) ParseProgram() *ast.Program {
program := &ast.Program{}
program.Statements = []ast.Statement{}
Expand All @@ -103,6 +101,15 @@ func (p *Parser) ParseProgram() *ast.Program {
return program
}

// Errors returns the parse error messages accumulated so far.
func (p *Parser) Errors() []string {
	return p.errors
}

// nextToken advances the parser by one token: curToken takes the old
// peekToken, and peekToken is refilled from the lexer.
func (p *Parser) nextToken() {
	p.curToken = p.peekToken
	p.peekToken = p.l.NextToken()
}

func (p *Parser) parseStatement() ast.Statement {
switch p.curToken.Type {
case token.LET:
Expand All @@ -115,6 +122,7 @@ func (p *Parser) parseStatement() ast.Statement {
}

func (p *Parser) parseLetStatement() *ast.LetStatement {
defer untrace(trace("parseLetStatement"))
stmt := &ast.LetStatement{Token: p.curToken}

if !p.expectPeek(token.IDENT) {
Expand All @@ -135,6 +143,7 @@ func (p *Parser) parseLetStatement() *ast.LetStatement {
}

func (p *Parser) parseReturnStatement() *ast.ReturnStatement {
defer untrace(trace("parseReturnStatement"))
stmt := &ast.ReturnStatement{Token: p.curToken}
p.nextToken()
// TODO: セミコロンに遭遇するまで式を読み飛ばしてしまっている
Expand All @@ -155,32 +164,6 @@ func (p *Parser) parseExpressionStatement() *ast.ExpressionStatement {
return stmt
}

// curTokenIs reports whether the current token has type t.
func (p *Parser) curTokenIs(t token.TokenType) bool {
	return p.curToken.Type == t
}

// peekTokenIs reports whether the next (peek) token has type t.
func (p *Parser) peekTokenIs(t token.TokenType) bool {
	return p.peekToken.Type == t
}

// expectPeek advances onto the next token when it has type t; otherwise it
// records a peek error and leaves the parser position unchanged.
func (p *Parser) expectPeek(t token.TokenType) bool {
	if !p.peekTokenIs(t) {
		p.peekError(t)
		return false
	}
	p.nextToken()
	return true
}

// Errors returns the parse error messages accumulated so far.
func (p *Parser) Errors() []string {
	return p.errors
}

// peekError records an error saying the next token was expected to have
// type t but had a different type.
func (p *Parser) peekError(t token.TokenType) {
	// Fix: the message previously misspelled "instead" as "insted".
	msg := fmt.Sprintf("expected next token to be %s, got %s instead", t, p.peekToken.Type)
	p.errors = append(p.errors, msg)
}

// registerPrefix associates tokenType with fn in the prefix-parse table.
func (p *Parser) registerPrefix(tokenType token.TokenType, fn prefixParseFn) {
	p.prefixParseFns[tokenType] = fn
}
Expand Down Expand Up @@ -214,12 +197,8 @@ func (p *Parser) parseExpression(precedence int) ast.Expression {
return leftExp
}

// noPrefixParseFnError records an error that no prefix parse function is
// registered for token type t.
func (p *Parser) noPrefixParseFnError(t token.TokenType) {
	msg := fmt.Sprintf("no prefix parse function for %s found", t)
	p.errors = append(p.errors, msg)
}

// parseIdentifier builds an *ast.Identifier from the current token.
func (p *Parser) parseIdentifier() ast.Expression {
	defer untrace(trace("parseIdentifier"))
	return &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
}

Expand Down Expand Up @@ -271,6 +250,7 @@ func (p *Parser) parseBoolean() ast.Expression {
}

func (p *Parser) parseGroupedExpression() ast.Expression {
defer untrace(trace("parseGroupExpression"))
p.nextToken()

exp := p.parseExpression(LOWEST)
Expand All @@ -281,6 +261,7 @@ func (p *Parser) parseGroupedExpression() ast.Expression {
}

func (p *Parser) parseIfExpression() ast.Expression {
defer untrace(trace("parseIfExpression"))
expression := &ast.IfExpression{Token: p.curToken}
if !p.expectPeek(token.LPAREN) {
return nil
Expand Down Expand Up @@ -312,6 +293,7 @@ func (p *Parser) parseIfExpression() ast.Expression {
}

func (p *Parser) parseBlockStatement() *ast.BlockStatement {
defer untrace(trace("parseBlockStatement"))
block := &ast.BlockStatement{Token: p.curToken}
block.Statements = []ast.Statement{}

Expand All @@ -329,6 +311,7 @@ func (p *Parser) parseBlockStatement() *ast.BlockStatement {
}

func (p *Parser) parseFunctionLiteral() ast.Expression {
defer untrace(trace("parseFunctionLiteral"))
lit := &ast.FunctionLiteral{Token: p.curToken}
if !p.expectPeek(token.LPAREN) {
return nil
Expand All @@ -345,6 +328,7 @@ func (p *Parser) parseFunctionLiteral() ast.Expression {
}

func (p *Parser) parseFunctionParameters() []*ast.Identifier {
defer untrace(trace("parseFunctionParameters"))
identifiers := []*ast.Identifier{}

if p.peekTokenIs(token.RPAREN) {
Expand All @@ -371,6 +355,55 @@ func (p *Parser) parseFunctionParameters() []*ast.Identifier {
return identifiers
}

// parseCallExpression is the infix parse function registered for '(': it
// wraps the already-parsed callee (function) and the argument list into an
// *ast.CallExpression.
func (p *Parser) parseCallExpression(function ast.Expression) ast.Expression {
	defer untrace(trace("parseCallExpression"))
	exp := &ast.CallExpression{Token: p.curToken, Function: function}
	exp.Arguments = p.parseCallArguments()
	return exp
}

// parseCallArguments parses a comma-separated argument list up to and
// including the closing ')'. It returns an empty (non-nil) slice for "()",
// and nil when the closing ')' is missing.
func (p *Parser) parseCallArguments() []ast.Expression {
	defer untrace(trace("parseCallArguments"))
	args := []ast.Expression{}

	// Empty argument list: "()" — consume the ')' and return.
	if p.peekTokenIs(token.RPAREN) {
		p.nextToken()
		return args
	}

	for {
		p.nextToken() // step onto the first token of the argument
		args = append(args, p.parseExpression(LOWEST))
		if !p.peekTokenIs(token.COMMA) {
			break
		}
		p.nextToken() // step onto the ','
	}

	if !p.expectPeek(token.RPAREN) {
		return nil
	}

	return args
}

// curTokenIs reports whether the current token has type t.
func (p *Parser) curTokenIs(t token.TokenType) bool {
	return p.curToken.Type == t
}

// peekTokenIs reports whether the next (peek) token has type t.
func (p *Parser) peekTokenIs(t token.TokenType) bool {
	return p.peekToken.Type == t
}

// expectPeek advances onto the next token when it has type t; otherwise it
// records a peek error and leaves the parser position unchanged.
func (p *Parser) expectPeek(t token.TokenType) bool {
	if !p.peekTokenIs(t) {
		p.peekError(t)
		return false
	}
	p.nextToken()
	return true
}

func (p *Parser) peekPrecedence() int {
if p, ok := precedences[p.peekToken.Type]; ok {
return p
Expand All @@ -386,3 +419,13 @@ func (p *Parser) curPrecedence() int {

return LOWEST
}

// noPrefixParseFnError records an error that no prefix parse function is
// registered for token type t.
func (p *Parser) noPrefixParseFnError(t token.TokenType) {
	msg := fmt.Sprintf("no prefix parse function for %s found", t)
	p.errors = append(p.errors, msg)
}

// peekError records an error saying the next token was expected to have
// type t but had a different type.
func (p *Parser) peekError(t token.TokenType) {
	// Fix: the message previously misspelled "instead" as "insted".
	msg := fmt.Sprintf("expected next token to be %s, got %s instead", t, p.peekToken.Type)
	p.errors = append(p.errors, msg)
}
47 changes: 47 additions & 0 deletions src/monkey/parser/parser_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -307,6 +307,18 @@ func TestOperatorPrecedenceParsing(t *testing.T) {
"!(true == true)",
"(!(true == true))",
},
{
"a + add(b * c) + d",
"((a + add((b * c))) + d)",
},
{
"add(a, b, 1, 2 * 3, 4 + 5, add(6, 7 * 8))",
"add(a, b, 1, (2 * 3), (4 + 5), add(6, (7 * 8)))",
},
{
"add(a + b + c * d / f + g)",
"add((((a + b) + ((c * d) / f)) + g))",
},
}

for _, tt := range tests {
Expand Down Expand Up @@ -526,6 +538,41 @@ func TestFunctionParameterParsing(t *testing.T) {

}

func TestCallExpressionParsing(t *testing.T) {
input := `add(1, 2 * 3, 4 + 5)`

l := lexer.New(input)
p := New(l)
program := p.ParseProgram()
checkParserErrors(t, p)

if len(program.Statements) != 1 {
t.Fatalf("program.Statements does not contain %d statements. got=%d\n", 1, len(program.Statements))
}

stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
if !ok {
t.Fatalf("stmt is not ast.ExpressionStatement. got=%T", program.Statements[0])
}

exp, ok := stmt.Expression.(*ast.CallExpression)
if !ok {
t.Fatalf("stmt.Expression is not ast.CallExpression. got=%T", stmt.Expression)
}

if !testIdentifier(t, exp.Function, "add") {
return
}

if len(exp.Arguments) != 3 {
t.Fatalf("wrong length of arguments. got=%d", len(exp.Arguments))
}

testLiteralExpression(t, exp.Arguments[0], 1)
testInfixExpression(t, exp.Arguments[1], 2, "*", 3)
testInfixExpression(t, exp.Arguments[2], 4, "+", 5)
}

func testLetStatement(t *testing.T, s ast.Statement, name string) bool {
if s.TokenLiteral() != "let" {
t.Errorf("s.TokenLiteral not 'let'. got=%q", s.TokenLiteral())
Expand Down