From ef88c389f027d1862a5dddf5e7f1a0520adf6974 Mon Sep 17 00:00:00 2001
From: tijani
Date: Thu, 24 Nov 2022 20:02:19 +0000
Subject: [PATCH] Adding support for arrays. Tests do not pass, need to fix.

git-svn-id: https://svn.tlawal.org/svn/monkey@60 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
---
 ast/ast.go            | 45 +++++++++++++++++++++++++++++++++
 lexer/lexer.go        |  4 ++++
 lexer/lexer_test.go   |  8 +++++++
 parser/parser.go      | 51 +++++++++++++++++++++++++++++++++++++-
 parser/parser_test.go | 56 ++++++++++++++++++++++++++++++++++++++++
 token/tokens.go       |  3 +++
 6 files changed, 166 insertions(+), 1 deletion(-)

diff --git a/ast/ast.go b/ast/ast.go
index 84b480b..6ac8496 100644
--- a/ast/ast.go
+++ b/ast/ast.go
@@ -296,3 +296,48 @@ type StringLiteral struct {
 func (sl *StringLiteral) expression_node()     {}
 func (sl *StringLiteral) TokenLiteral() string { return sl.Token.Literal }
 func (sl *StringLiteral) String() string       { return sl.Token.Literal }
+
+// Array Literal
+type ArrayLiteral struct {
+	Token    token.Token // the '[' token
+	Elements []Expression
+}
+
+func (al *ArrayLiteral) expression_node()     {}
+func (al *ArrayLiteral) TokenLiteral() string { return al.Token.Literal }
+func (al *ArrayLiteral) String() string {
+	var out bytes.Buffer
+	elements := []string{}
+
+	for _, el := range al.Elements {
+		elements = append(elements, el.String())
+	}
+
+	out.WriteString("[")
+	out.WriteString(strings.Join(elements, ", "))
+	out.WriteString("]")
+
+	return out.String()
+}
+
+// Array Index Operator Expression
+type IndexExpression struct {
+	Token token.Token // The [ token
+	Left  Expression
+	Index Expression
+}
+
+func (ie *IndexExpression) expression_node()     {}
+func (ie *IndexExpression) TokenLiteral() string { return ie.Token.Literal }
+func (ie *IndexExpression) String() string {
+	var out bytes.Buffer
+	out.WriteString("(")
+	out.WriteString(ie.Left.String())
+	out.WriteString("[")
+	out.WriteString(ie.Index.String())
+	out.WriteString("])")
+
+	return out.String()
+}
+
+// Stopped at page 169
diff --git a/lexer/lexer.go b/lexer/lexer.go
index 0f472a7..f08beb2 100644
--- a/lexer/lexer.go
+++ b/lexer/lexer.go
@@ -65,6 +65,10 @@ func (l_lexer *Lexer) NextToken() token.Token {
 	case '"':
 		tok.Type = token.STRING
 		tok.Literal = l_lexer.read_string()
+	case '[':
+		tok = new_token(token.LBRACKET, l_lexer.current_char)
+	case ']':
+		tok = new_token(token.RBRACKET, l_lexer.current_char)
 	case 0:
 		tok.Literal = ""
 		tok.Type = token.EOF
diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
index b3930bc..326f6eb 100644
--- a/lexer/lexer_test.go
+++ b/lexer/lexer_test.go
@@ -27,6 +27,7 @@ func TestNextToken(t *testing.T) {
 10 != 9;
 "foobar"
 "foo babr"
+[1,2];
 `
 	tests := []struct {
 		expectedType    token.TokenType
@@ -116,6 +117,13 @@ func TestNextToken(t *testing.T) {
 		{token.STRING, "foobar"},
 		{token.STRING, "foo babr"},
 
+		{token.LBRACKET, "["},
+		{token.INT, "1"},
+		{token.COMMA, ","},
+		{token.INT, "2"},
+		{token.RBRACKET, "]"},
+		{token.SEMICOLON, ";"},
+
 		{token.EOF, ""},
 	}
 
diff --git a/parser/parser.go b/parser/parser.go
index eb97a76..217d287 100644
--- a/parser/parser.go
+++ b/parser/parser.go
@@ -18,6 +18,7 @@ const (
 	PRODUCT // *
 	PREFIX  // -x OR !x
 	CALL    // simple_function(x)
+	INDEX   // array[index]
 )
 
 // Precedence Table
@@ -31,6 +32,7 @@ var precedences = map[token.TokenType]int{
 	token.SLASH:    PRODUCT,
 	token.ASTERISK: PRODUCT,
 	token.LPAREN:   CALL,
+	token.LBRACKET: INDEX,
 }
 
 func (l_parser *Parser) peek_precedence() int {
@@ -116,6 +118,10 @@ func New(l_lexer *lexer.Lexer) *Parser {
 
 	// String
 	l_parser.register_prefix(token.STRING, l_parser.parse_string_literal)
+
+	// Array
+	l_parser.register_prefix(token.LBRACKET, l_parser.parse_array_literal)
+	l_parser.register_infix(token.LBRACKET, l_parser.parse_index_expression)
 
 	return l_parser
 }
@@ -407,7 +413,7 @@ func (l_parser *Parser) parse_function_parameters() []*ast.Identifier {
 
 func (l_parser *Parser) parse_call_expression(function ast.Expression) ast.Expression {
 	// defer untrace(trace("parse_call_expression"))
 	expression := &ast.CallExpression{Token: l_parser.current_token, Function: function}
-	expression.Arguments = l_parser.parse_call_arguments()
+	expression.Arguments = l_parser.parse_expression_list(token.RPAREN)
 	return expression
 }
@@ -441,3 +447,46 @@ func (l_parser *Parser) parse_string_literal() ast.Expression {
 		Value: l_parser.current_token.Literal,
 	}
 }
+
+// Array
+func (l_parser *Parser) parse_array_literal() ast.Expression {
+	array := &ast.ArrayLiteral{Token: l_parser.current_token}
+	array.Elements = l_parser.parse_expression_list(token.RBRACKET)
+	return array
+}
+
+func (l_parser *Parser) parse_expression_list(end token.TokenType) []ast.Expression {
+	list := []ast.Expression{}
+
+	if l_parser.peek_token_is(end) {
+		l_parser.next_token()
+		return list
+	}
+
+	l_parser.next_token()
+	list = append(list, l_parser.parse_expression(LOWEST))
+
+	for l_parser.peek_token_is(token.COMMA) {
+		l_parser.next_token()
+		l_parser.next_token()
+		list = append(list, l_parser.parse_expression(LOWEST))
+	}
+
+	if !l_parser.expect_peek(end) {
+		return nil
+	}
+
+	return list
+}
+
+func (l_parser *Parser) parse_index_expression(left ast.Expression) ast.Expression {
+	expression := &ast.IndexExpression{Token: l_parser.current_token, Left: left}
+	l_parser.next_token()
+
+	expression.Index = l_parser.parse_expression(LOWEST)
+	if !l_parser.expect_peek(token.RBRACKET) {
+		return nil
+	}
+
+	return expression
+}
diff --git a/parser/parser_test.go b/parser/parser_test.go
index 4345034..eb2adc5 100644
--- a/parser/parser_test.go
+++ b/parser/parser_test.go
@@ -322,6 +322,14 @@ func TestOperatorPrecedenceParsing(l_test *testing.T) {
 			"add(a + b + c * d / f + g)",
 			"add((((a + b) + ((c * d) / f)) + g))",
 		},
+		{
+			"a * [1, 2, 3, 4][b * c] * d",
+			"((a * ([1, 2, 3, 4][(b * c)])) * d)",
+		},
+		{
+			"add(a * b[2], b[1], 2 * [1, 2][1])",
+			"add((a * (b[2])), (b[1]), (2 * ([1, 2][1])))",
+		},
 	}
 	for _, tt := range tests {
 		l_lexer := lexer.New(tt.input)
@@ -676,6 +684,54 @@ func TestStringLiteralExpression(l_test *testing.T) {
 	}
 }
+func TestParsingArrayLiterals(l_test *testing.T) {
+	input := "[1, 2 * 2, 3 + 3]"
+
+	l_lexer := lexer.New(input)
+	l_parser := New(l_lexer)
+	program := l_parser.ParseProgram()
+	check_parser_errors(l_test, l_parser)
+
+	statement, ok := program.Statements[0].(*ast.ExpressionStatement)
+	array, ok := statement.Expression.(*ast.ArrayLiteral)
+
+	if !ok {
+		l_test.Fatalf("expression is not ast.ArrayLiteral, got=%T", statement.Expression)
+	}
+
+	if len(array.Elements) != 3 {
+		l_test.Fatalf("len(array.Elements) not 3, got=%d", len(array.Elements))
+	}
+
+	testIntegerLiteral(l_test, array.Elements[0], 1)
+	testInfixExpression(l_test, array.Elements[1], 2, "*", 2)
+	testInfixExpression(l_test, array.Elements[2], 3, "+", 3)
+}
+
+func TestParsingIndexExpressions(l_test *testing.T) {
+	input := "my_array[1+1]"
+
+	l_lexer := lexer.New(input)
+	l_parser := New(l_lexer)
+	program := l_parser.ParseProgram()
+	check_parser_errors(l_test, l_parser)
+
+	statement, ok := program.Statements[0].(*ast.ExpressionStatement)
+	index_expression, ok := statement.Expression.(*ast.IndexExpression)
+
+	if !ok {
+		l_test.Fatalf("expression is not *ast.IndexExpression, got=%T", statement.Expression)
+	}
+
+	if !testIdentifier(l_test, index_expression.Left, "my_array") {
+		return
+	}
+
+	if !testInfixExpression(l_test, index_expression.Index, 1, "+", 1) {
+		return
+	}
+}
+
 // Helpers
 
 func check_parser_errors(l_test *testing.T, l_parser *Parser) {
diff --git a/token/tokens.go b/token/tokens.go
index b8d980b..f317b93 100644
--- a/token/tokens.go
+++ b/token/tokens.go
@@ -38,6 +38,9 @@ const (
 	LBRACE = "{"
 	RBRACE = "}"
 
+	LBRACKET = "["
+	RBRACKET = "]"
+
 	// Keywords
 	FUNCTION = "FUNCTION"
 	LET      = "LET"
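
A quick way to exercise the new bracket tokens end to end, outside the test suite, is a small driver program. This is only a sketch, not part of the patch: the "monkey/lexer" and "monkey/parser" import paths are assumptions about the module layout (go.mod is not shown here), and it relies on ast.Program and the nodes above providing the usual String() methods from the earlier chapters. Parser error reporting is omitted for brevity.

    package main

    import (
        "fmt"

        "monkey/lexer"  // assumed module path
        "monkey/parser" // assumed module path
    )

    func main() {
        // One index expression and one array literal, both built from the new [ and ] tokens.
        input := "my_array[1 + 1]; [1, 2 * 2, 3 + 3];"

        l_lexer := lexer.New(input)
        l_parser := parser.New(l_lexer)
        program := l_parser.ParseProgram()

        // With parse_array_literal and parse_index_expression registered above,
        // this should print: (my_array[(1 + 1)])[1, (2 * 2), (3 + 3)]
        fmt.Println(program.String())
    }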