Add support for array literals and index expressions. Tests do not pass yet; fix to follow.

git-svn-id: https://svn.tlawal.org/svn/monkey@60 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
This commit is contained in:
Tijani Lawal 2022-11-24 20:02:19 +00:00
parent 40de215f70
commit ef88c389f0
6 changed files with 166 additions and 1 deletions

View File

@ -296,3 +296,48 @@ type StringLiteral struct {
func (sl *StringLiteral) expression_node() {} func (sl *StringLiteral) expression_node() {}
func (sl *StringLiteral) TokenLiteral() string { return sl.Token.Literal } func (sl *StringLiteral) TokenLiteral() string { return sl.Token.Literal }
func (sl *StringLiteral) String() string { return sl.Token.Literal } func (sl *StringLiteral) String() string { return sl.Token.Literal }
// ArrayLiteral is the AST node for an array literal such as [1, 2 * 2, 3].
type ArrayLiteral struct {
	Token    token.Token // the '[' token
	Elements []Expression
}

func (al *ArrayLiteral) expression_node() {}

// TokenLiteral returns the literal text of the opening '[' token.
func (al *ArrayLiteral) TokenLiteral() string { return al.Token.Literal }

// String renders the literal as "[e1, e2, ...]".
func (al *ArrayLiteral) String() string {
	var out bytes.Buffer

	out.WriteString("[")
	for i, element := range al.Elements {
		if i > 0 {
			out.WriteString(", ")
		}
		out.WriteString(element.String())
	}
	out.WriteString("]")

	return out.String()
}
// IndexExpression is the AST node for an index operation, e.g. my_array[0].
type IndexExpression struct {
	Token token.Token // The [ token
	Left  Expression  // the expression being indexed
	Index Expression  // the expression inside the brackets
}

func (ie *IndexExpression) expression_node() {}

// TokenLiteral returns the literal text of the '[' token.
func (ie *IndexExpression) TokenLiteral() string { return ie.Token.Literal }

// String renders the expression as "(left[index])".
func (ie *IndexExpression) String() string {
	var out bytes.Buffer

	out.WriteString("(")
	out.WriteString(ie.Left.String())
	out.WriteString("[")
	out.WriteString(ie.Index.String())
	// Bug fix: the closing ")" was missing, producing "(left[index]" and
	// breaking the operator-precedence tests which expect "(left[index])".
	out.WriteString("])")

	return out.String()
}
// Stopped at page 169

View File

@ -65,6 +65,10 @@ func (l_lexer *Lexer) NextToken() token.Token {
case '"': case '"':
tok.Type = token.STRING tok.Type = token.STRING
tok.Literal = l_lexer.read_string() tok.Literal = l_lexer.read_string()
case '[':
tok = new_token(token.LBRACKET, l_lexer.current_char)
case ']':
tok = new_token(token.RBRACKET, l_lexer.current_char)
case 0: case 0:
tok.Literal = "" tok.Literal = ""
tok.Type = token.EOF tok.Type = token.EOF

View File

@ -27,6 +27,7 @@ func TestNextToken(t *testing.T) {
10 != 9; 10 != 9;
"foobar" "foobar"
"foo babr" "foo babr"
[1,2];
` `
tests := []struct { tests := []struct {
expectedType token.TokenType expectedType token.TokenType
@ -116,6 +117,13 @@ func TestNextToken(t *testing.T) {
{token.STRING, "foobar"}, {token.STRING, "foobar"},
{token.STRING, "foo babr"}, {token.STRING, "foo babr"},
{token.LBRACKET, "["},
{token.INT, "1"},
{token.COMMA, ","},
{token.INT, "2"},
{token.RBRACKET, "]"},
{token.SEMICOLON, ";"},
{token.EOF, ""}, {token.EOF, ""},
} }

View File

@ -18,6 +18,7 @@ const (
PRODUCT // * PRODUCT // *
PREFIX // -x OR !x PREFIX // -x OR !x
CALL // simple_function(x) CALL // simple_function(x)
INDEX // array[index]
) )
// Precedence Table // Precedence Table
@ -31,6 +32,7 @@ var precedences = map[token.TokenType]int{
token.SLASH: PRODUCT, token.SLASH: PRODUCT,
token.ASTERISK: PRODUCT, token.ASTERISK: PRODUCT,
token.LPAREN: CALL, token.LPAREN: CALL,
token.LBRACKET: INDEX,
} }
func (l_parser *Parser) peek_precedence() int { func (l_parser *Parser) peek_precedence() int {
@ -116,6 +118,10 @@ func New(l_lexer *lexer.Lexer) *Parser {
// String // String
l_parser.register_prefix(token.STRING, l_parser.parse_string_literal) l_parser.register_prefix(token.STRING, l_parser.parse_string_literal)
// Array
l_parser.register_prefix(token.LBRACKET, l_parser.parse_array_literal)
l_parser.register_infix(token.LBRACKET, l_parser.parse_index_expression)
return l_parser return l_parser
} }
@ -407,7 +413,7 @@ func (l_parser *Parser) parse_function_parameters() []*ast.Identifier {
func (l_parser *Parser) parse_call_expression(function ast.Expression) ast.Expression { func (l_parser *Parser) parse_call_expression(function ast.Expression) ast.Expression {
// defer untrace(trace("parse_call_expression")) // defer untrace(trace("parse_call_expression"))
expression := &ast.CallExpression{Token: l_parser.current_token, Function: function} expression := &ast.CallExpression{Token: l_parser.current_token, Function: function}
expression.Arguments = l_parser.parse_call_arguments() expression.Arguments = l_parser.parse_expression_list(token.RPAREN)
return expression return expression
} }
@ -441,3 +447,46 @@ func (l_parser *Parser) parse_string_literal() ast.Expression {
Value: l_parser.current_token.Literal, Value: l_parser.current_token.Literal,
} }
} }
// Array
// parse_array_literal parses an array literal starting at the current '['
// token, e.g. [1, 2 * 2, 3 + 3], and returns the *ast.ArrayLiteral node.
func (l_parser *Parser) parse_array_literal() ast.Expression {
	literal := &ast.ArrayLiteral{
		Token:    l_parser.current_token,
		Elements: l_parser.parse_expression_list(token.RBRACKET),
	}
	return literal
}
// parse_expression_list parses a comma-separated list of expressions up to
// and including the given terminator token (e.g. ']' for array literals,
// ')' for call arguments). It returns nil if the terminator is missing.
func (l_parser *Parser) parse_expression_list(end token.TokenType) []ast.Expression {
	expressions := []ast.Expression{}

	// Empty list: consume the terminator and return right away.
	if l_parser.peek_token_is(end) {
		l_parser.next_token()
		return expressions
	}

	// First element.
	l_parser.next_token()
	expressions = append(expressions, l_parser.parse_expression(LOWEST))

	// Remaining elements, each preceded by a comma.
	for l_parser.peek_token_is(token.COMMA) {
		l_parser.next_token() // onto the comma
		l_parser.next_token() // past it, onto the next expression
		expressions = append(expressions, l_parser.parse_expression(LOWEST))
	}

	if !l_parser.expect_peek(end) {
		return nil
	}

	return expressions
}
// parse_index_expression parses "left[index]" with the current token on '[',
// returning an *ast.IndexExpression, or nil if the closing ']' is missing.
func (l_parser *Parser) parse_index_expression(left ast.Expression) ast.Expression {
	exp := &ast.IndexExpression{Token: l_parser.current_token, Left: left}

	l_parser.next_token() // step past '[' onto the index expression
	exp.Index = l_parser.parse_expression(LOWEST)

	if !l_parser.expect_peek(token.RBRACKET) {
		return nil
	}

	return exp
}

View File

@ -322,6 +322,14 @@ func TestOperatorPrecedenceParsing(l_test *testing.T) {
"add(a + b + c * d / f + g)", "add(a + b + c * d / f + g)",
"add((((a + b) + ((c * d) / f)) + g))", "add((((a + b) + ((c * d) / f)) + g))",
}, },
{
"a * [1, 2, 3, 4][b * c] * d",
"((a * ([1, 2, 3, 4][(b * c)])) * d)",
},
{
"add(a * b[2], b[1], 2 * [1, 2][1])",
"add((a * (b[2])), (b[1]), (2 * ([1, 2][1])))",
},
} }
for _, tt := range tests { for _, tt := range tests {
l_lexer := lexer.New(tt.input) l_lexer := lexer.New(tt.input)
@ -676,6 +684,54 @@ func TestStringLiteralExpression(l_test *testing.T) {
} }
} }
// TestParsingArrayLiterals checks that "[1, 2 * 2, 3 + 3]" parses into an
// *ast.ArrayLiteral with three correctly-typed elements.
func TestParsingArrayLiterals(l_test *testing.T) {
	input := "[1, 2 * 2, 3 + 3]"

	l_lexer := lexer.New(input)
	l_parser := New(l_lexer)
	program := l_parser.ParseProgram()
	check_parser_errors(l_test, l_parser)

	// Bug fix: the result of this first type assertion was ignored, so a
	// parse failure would panic on the statement.Expression dereference
	// instead of failing with a useful message.
	statement, ok := program.Statements[0].(*ast.ExpressionStatement)
	if !ok {
		l_test.Fatalf("program.Statements[0] is not ast.ExpressionStatement, got=%T", program.Statements[0])
	}
	array, ok := statement.Expression.(*ast.ArrayLiteral)
	if !ok {
		l_test.Fatalf("expression is not ast.ArrayLiteral, got=%T", statement.Expression)
	}
	if len(array.Elements) != 3 {
		l_test.Fatalf("len(array.Elements) not 3, got=%d", len(array.Elements))
	}

	testIntegerLiteral(l_test, array.Elements[0], 1)
	testInfixExpression(l_test, array.Elements[1], 2, "*", 2)
	testInfixExpression(l_test, array.Elements[2], 3, "+", 3)
}
// TestParsingIndexExpressions checks that "my_array[1+1]" parses into an
// *ast.IndexExpression with the expected Left identifier and Index infix.
func TestParsingIndexExpressions(l_test *testing.T) {
	input := "my_array[1+1]"

	l_lexer := lexer.New(input)
	l_parser := New(l_lexer)
	program := l_parser.ParseProgram()
	check_parser_errors(l_test, l_parser)

	// Bug fix: the result of this first type assertion was ignored, so a
	// parse failure would panic instead of failing with a useful message.
	statement, ok := program.Statements[0].(*ast.ExpressionStatement)
	if !ok {
		l_test.Fatalf("program.Statements[0] is not ast.ExpressionStatement, got=%T", program.Statements[0])
	}
	index_expression, ok := statement.Expression.(*ast.IndexExpression)
	if !ok {
		l_test.Fatalf("expression is not *ast.IndexExpression, got=%T", statement.Expression)
	}

	if !testIdentifier(l_test, index_expression.Left, "my_array") {
		return
	}
	if !testInfixExpression(l_test, index_expression.Index, 1, "+", 1) {
		return
	}
}
// Helpers // Helpers
func check_parser_errors(l_test *testing.T, l_parser *Parser) { func check_parser_errors(l_test *testing.T, l_parser *Parser) {

View File

@ -38,6 +38,9 @@ const (
LBRACE = "{" LBRACE = "{"
RBRACE = "}" RBRACE = "}"
LBRACKET = "["
RBRACKET = "]"
// Keywords // Keywords
FUNCTION = "FUNCTION" FUNCTION = "FUNCTION"
LET = "LET" LET = "LET"