Interpreter complete.

git-svn-id: https://svn.tlawal.org/svn/monkey@62 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
Tijani Lawal 2023-05-05 19:15:39 +00:00
parent a3b39f1988
commit 4f3ef7d314
11 changed files with 39 additions and 400 deletions

View File

@@ -286,58 +286,3 @@ func (ce *CallExpression) String() string {
return out.String()
}
// String Literal
type StringLiteral struct {
Token token.Token
Value string
}
func (sl *StringLiteral) expression_node() {}
func (sl *StringLiteral) TokenLiteral() string { return sl.Token.Literal }
func (sl *StringLiteral) String() string { return sl.Token.Literal }
// Array Literal
type ArrayLiteral struct {
Token token.Token // the '[' token
Elements []Expression
}
func (al *ArrayLiteral) expression_node() {}
func (al *ArrayLiteral) TokenLiteral() string { return al.Token.Literal }
func (al *ArrayLiteral) String() string {
var out bytes.Buffer
elements := []string{}
for _, el := range al.Elements {
elements = append(elements, el.String())
}
out.WriteString("[")
out.WriteString(strings.Join(elements, ", "))
out.WriteString("]")
return out.String()
}
// Array Index Operator Expression
type IndexExpression struct {
Token token.Token // The [ token
Left Expression
Index Expression
}
func (ie *IndexExpression) expression_node() {}
func (ie *IndexExpression) TokenLiteral() string { return ie.Token.Literal }
func (ie *IndexExpression) String() string {
var out bytes.Buffer
out.WriteString("(")
out.WriteString(ie.Left.String())
out.WriteString("[")
out.WriteString(ie.Index.String())
out.WriteString("])")
return out.String()
}
// Stopped at page 169

View File

@@ -1,19 +0,0 @@
package evaluator
import "monkey/object"
var builtins = map[string]*object.Builtin{
"len": &object.Builtin{
Fn: func(args ...object.Object) object.Object {
if len(args) != 1 {
return new_error("wrong number of arguments, got=%d, want=1", len(args))
}
switch arg := args[0].(type) {
case *object.String:
return &object.Integer{Value: int64(len(arg.Value))}
default:
return new_error("argument to `len` not supported, got %s", args[0].Type())
}
},
},
}

View File

@@ -85,9 +85,6 @@ func Eval(node ast.Node, env *object.Environment) object.Object {
return args[0]
}
return apply_function(function, args)
case *ast.StringLiteral:
return &object.String{Value: node.Value}
}
return nil
@@ -111,18 +108,14 @@ func eval_program(program *ast.Program, env *object.Environment) object.Object {
}
func apply_function(fn object.Object, args []object.Object) object.Object {
switch fn := fn.(type) {
case *object.Function:
extended_env := extend_function_env(fn, args)
evaluated := Eval(fn.Body, extended_env)
return unwrap_return_value(evaluated)
case *object.Builtin:
return fn.Fn(args...)
default:
function, ok := fn.(*object.Function)
if !ok {
return new_error("not a function: %s", fn.Type())
}
extended_env := extend_function_env(function, args)
evaluated := Eval(function.Body, extended_env)
return unwrap_return_value(evaluated)
}
func extend_function_env(fn *object.Function, args []object.Object) *object.Environment {
@@ -155,14 +148,11 @@ func eval_expression(expressions []ast.Expression, env *object.Environment) []ob
}
func eval_identifier(node *ast.Identifier, env *object.Environment) object.Object {
if val, ok := env.Get(node.Value); ok {
return val
val, ok := env.Get(node.Value)
if !ok {
return new_error("identifier not found: " + node.Value)
}
if builtin, ok := builtins[node.Value]; ok {
return builtin
}
return new_error("identifier not found: " + node.Value)
return val
}
func eval_block_statement(block *ast.BlockStatement, env *object.Environment) object.Object {
@@ -182,6 +172,13 @@ func eval_block_statement(block *ast.BlockStatement, env *object.Environment) ob
return result
}
func native_bool_to_boolean_object(input bool) *object.Boolean {
if input {
return TRUE
}
return FALSE
}
func eval_prefix_expression(operator string, right object.Object) object.Object {
switch operator {
case "!":
@@ -231,9 +228,6 @@ func eval_infix_expression(operator string, left object.Object, right object.Obj
case left.Type() != right.Type():
return new_error("type mismatch: %s %s %s", left.Type(), operator, right.Type())
case left.Type() == object.STRING_OBJECT && right.Type() == object.STRING_OBJECT:
return eval_string_infix_expression(operator, left, right)
default:
return new_error("unknown operator: %s %s %s", left.Type(), operator, right.Type())
}
@@ -273,19 +267,6 @@ func eval_integer_infix_expression(operator string, left object.Object, right ob
}
}
/*
FEATURES TODO:
- Add support for string comparison '==' and '!='.
*/
func eval_string_infix_expression(operator string, left object.Object, right object.Object) object.Object {
if operator != "+" {
return new_error("unknown operator: %s %s %s", left.Type(), operator, right.Type())
}
left_value := left.(*object.String).Value
right_value := right.(*object.String).Value
return &object.String{Value: left_value + right_value}
}
func eval_if_expression(ie *ast.IfExpression, env *object.Environment) object.Object {
condition := Eval(ie.Condition, env)
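
A side note on the TODO above: only '+' is handled, and string comparison via '==' and '!=' is still missing. A rough sketch of how the removed eval_string_infix_expression could be extended (illustration only, not part of this commit; it assumes the function stays in package evaluator and reuses the existing native_bool_to_boolean_object and new_error helpers):

```
// Sketch: string concatenation plus equality operators.
func eval_string_infix_expression(operator string, left object.Object, right object.Object) object.Object {
	left_value := left.(*object.String).Value
	right_value := right.(*object.String).Value
	switch operator {
	case "+":
		return &object.String{Value: left_value + right_value}
	case "==":
		return native_bool_to_boolean_object(left_value == right_value)
	case "!=":
		return native_bool_to_boolean_object(left_value != right_value)
	default:
		return new_error("unknown operator: %s %s %s", left.Type(), operator, right.Type())
	}
}
```
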
@@ -302,13 +283,6 @@ func eval_if_expression(ie *ast.IfExpression, env *object.Environment) object.Ob
}
}
func native_bool_to_boolean_object(input bool) *object.Boolean {
if input {
return TRUE
}
return FALSE
}
func is_truthy(object object.Object) bool {
switch object {
case NULL:

View File

@@ -56,9 +56,6 @@ func TestErrorHandling(l_test *testing.T) {
}
`, "unknown operator: BOOLEAN + BOOLEAN"},
{"foobar", "identifier not found: foobar"},
// Make sure only the + operator is supported for string concatenation; anything else should be an error.
{`"Hello" - "World"`, "unknown operator: STRING - STRING"},
}
for _, tt := range tests {
@@ -240,65 +237,6 @@ func TestClosures(l_test *testing.T) {
test_integer_object(l_test, test_eval(input), 4)
}
func TestStringLiteral(l_test *testing.T) {
input := `"Hello, world!"`
evaluated := test_eval(input)
string, ok := evaluated.(*object.String)
if !ok {
l_test.Fatalf("object is not String, got=%T (%+v)", evaluated, evaluated)
}
if string.Value != "Hello, world!" {
l_test.Errorf("String has wrong value, got=%q", string.Value)
}
}
func TestStringConcatenation(l_test *testing.T) {
input := `"Hello" + " " + "World!"`
evaluated := test_eval(input)
string, ok := evaluated.(*object.String)
if !ok {
l_test.Fatalf("object is not String, got=%T (%+v)", evaluated, evaluated)
}
if string.Value != "Hello World!" {
l_test.Errorf("String has wrong value, got=%q", string.Value)
}
}
func TestBuiltinFunctions(l_test *testing.T) {
tests := []struct {
input string
expected interface{}
}{
{`len("")`, 0},
{`len("four")`, 4},
{`len("hello world")`, 11},
{`len(1)`, "argument to `len` not supported, got INTEGER"},
{`len("one", "two")`, "wrong number of arguments, got=2, want=1"},
}
for _, tt := range tests {
evaluated := test_eval(tt.input)
switch expected := tt.expected.(type) {
case int:
test_integer_object(l_test, evaluated, int64(expected))
case string:
error_object, ok := evaluated.(*object.Error)
if !ok {
l_test.Errorf("object is not Error, got=%T (%+v)", evaluated, evaluated)
continue
}
if error_object.Message != expected {
l_test.Errorf("wrong error message, expected=%q, got=%q", expected, error_object.Message)
}
}
}
}
// Helpers
func test_eval(input string) object.Object {
l_lexer := lexer.New(input)
@@ -314,7 +252,6 @@ func test_integer_object(l_test *testing.T, l_object object.Object, expected int
if !ok {
l_test.Errorf("object is not integer, got=%T (%+v)", l_object, l_object)
return false
}
if result.Value != expected {
l_test.Errorf("object has wrong value, got=%d, want=%d", result.Value, expected)

View File

@@ -62,13 +62,6 @@ func (l_lexer *Lexer) NextToken() token.Token {
tok = new_token(token.LT, l_lexer.current_char)
case '>':
tok = new_token(token.GT, l_lexer.current_char)
case '"':
tok.Type = token.STRING
tok.Literal = l_lexer.read_string()
case '[':
tok = new_token(token.LBRACKET, l_lexer.current_char)
case ']':
tok = new_token(token.RBRACKET, l_lexer.current_char)
case 0:
tok.Literal = ""
tok.Type = token.EOF
@@ -141,19 +134,3 @@ func (l_lexer *Lexer) read_number() string {
func is_digit(ch byte) bool {
return '0' <= ch && ch <= '9'
}
/*
Reads characters until it encounters a closing '"' or the end of input.
TODO: at the lexer level, report an error when the end of input is reached without a closing '"'
(an unterminated string). Support for character escaping would also be useful.
*/
func (l_lexer *Lexer) read_string() string {
position := l_lexer.position + 1
for {
l_lexer.read_char()
if l_lexer.current_char == '"' || l_lexer.current_char == 0 {
break
}
}
return l_lexer.input[position:l_lexer.position]
}
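
The comment above flags two lexer-level improvements: detecting unterminated strings and supporting escape sequences. One possible shape, as a sketch only — the method name, the (string, bool) return value, and the escape set are assumptions, and NextToken would have to be changed to use it and to emit an error token when ok is false:

```
// Sketch: read a string literal, handling a few backslash escapes and
// signalling an unterminated literal via ok == false. Assumes current_char
// is a byte, as elsewhere in this lexer.
func (l_lexer *Lexer) read_string_with_escapes() (literal string, ok bool) {
	var out []byte
	for {
		l_lexer.read_char()
		switch l_lexer.current_char {
		case '"':
			return string(out), true // properly terminated
		case 0:
			return string(out), false // end of input before closing '"'
		case '\\':
			l_lexer.read_char() // consume the character after the backslash
			switch l_lexer.current_char {
			case 'n':
				out = append(out, '\n')
			case 't':
				out = append(out, '\t')
			case 0:
				return string(out), false // input ended mid-escape
			default:
				// '"', '\\' and any unknown escape: keep the character itself
				out = append(out, l_lexer.current_char)
			}
		default:
			out = append(out, l_lexer.current_char)
		}
	}
}
```
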

View File

@@ -25,9 +25,6 @@ func TestNextToken(t *testing.T) {
10 == 10;
10 != 9;
"foobar"
"foo babr"
[1,2];
`
tests := []struct {
expectedType token.TokenType
@@ -114,16 +111,6 @@ func TestNextToken(t *testing.T) {
{token.INT, "9"},
{token.SEMICOLON, ";"},
{token.STRING, "foobar"},
{token.STRING, "foo babr"},
{token.LBRACKET, "["},
{token.INT, "1"},
{token.COMMA, ","},
{token.INT, "2"},
{token.RBRACKET, "]"},
{token.SEMICOLON, ";"},
{token.EOF, ""},
}

View File

@@ -18,30 +18,30 @@ func NewEnvironment() *Environment {
}
/*
Enclosing Environments

Here is a problem case; let's say in monkey I would want to type this:

```
let i = 5;
let print_num = fn(i) {
puts(i);
}

print_num(10);
puts(i);
```

The ideal result of the above code in the monkey programming language is for 10 and 5 to be the outputs respectively.
In a situation where enclosed environments do not exist, both outputs will be 10 because the current value of i
would be overwritten. The ideal situation would be to preserve the previous binding to 'i' while also making a new
one.

This works by creating a new instance of object.Environment with a pointer to the environment it should extend; doing this
encloses a fresh and empty environment with an existing one. When the Get method is called and it itself doesn't have the value
associated with the given name, it calls the Get of the enclosing environment. That's the environment it's extending. If that
enclosing environment can't find the value, it calls its own enclosing environment, and so on until there is no enclosing
environment anymore and it errors out with an unknown identifier.
*/
func NewEnclosedEnvironment(outer *Environment) *Environment {
env := NewEnvironment()
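
The enclosing-environment behaviour described in the comment boils down to an outer pointer plus a Get that falls back to it (NewEnclosedEnvironment is cut off in the hunk above). A compact sketch, assuming it lives in package object next to the Object interface; the store/outer field names are illustrative and may differ from the real environment.go:

```
// Sketch of the enclosing-environment pattern.
type Environment struct {
	store map[string]Object
	outer *Environment // nil for the outermost (global) environment
}

func NewEnvironment() *Environment {
	return &Environment{store: make(map[string]Object)}
}

func NewEnclosedEnvironment(outer *Environment) *Environment {
	env := NewEnvironment()
	env.outer = outer // the fresh, empty scope extends the outer one
	return env
}

// Get looks in the local store first, then walks outward through the chain
// of enclosing environments until the name is found or the chain ends.
func (e *Environment) Get(name string) (Object, bool) {
	obj, ok := e.store[name]
	if !ok && e.outer != nil {
		obj, ok = e.outer.Get(name)
	}
	return obj, ok
}

// Set always binds in the innermost scope, which is what preserves the
// outer 'i' in the print_num example above.
func (e *Environment) Set(name string, val Object) Object {
	e.store[name] = val
	return val
}
```
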

View File

@@ -16,8 +16,6 @@ const (
RETURN_VALUE_OBJECT = "RETURN_VALUE"
ERROR_OBJECT = "ERROR"
FUNCTION_OBJECT = "FUNCTION"
STRING_OBJECT = "STRING"
BUILTIN_OBJ = "BUILTIN"
)
type Object interface {
@@ -103,26 +101,3 @@ func (f *Function) Inspect() string {
return out.String()
}
// String
type String struct {
Value string
}
func (s *String) Type() ObjectType { return STRING_OBJECT }
func (s *String) Inspect() string { return s.Value }
// Builtin Functions
type BuiltinFunction func(args ...Object) Object
type Builtin struct {
Fn BuiltinFunction
}
func (b *Builtin) Type() ObjectType {
return BUILTIN_OBJ
}
func (b *Builtin) Inspect() string {
return "Builtin Function"
}

View File

@@ -18,7 +18,6 @@ const (
PRODUCT // *
PREFIX // -x OR !x
CALL // simple_function(x)
INDEX // array[index]
)
// Precedence Table
@@ -32,7 +31,6 @@ var precedences = map[token.TokenType]int{
token.SLASH: PRODUCT,
token.ASTERISK: PRODUCT,
token.LPAREN: CALL,
token.LBRACKET: INDEX,
}
func (l_parser *Parser) peek_precedence() int {
@@ -115,13 +113,6 @@ func New(l_lexer *lexer.Lexer) *Parser {
// Call Expression
l_parser.register_infix(token.LPAREN, l_parser.parse_call_expression)
// String
l_parser.register_prefix(token.STRING, l_parser.parse_string_literal)
// Array
l_parser.register_prefix(token.LBRACKET, l_parser.parse_array_literal)
l_parser.register_infix(token.LBRACKET, l_parser.parse_index_expression)
return l_parser
}
@@ -413,7 +404,7 @@ func (l_parser *Parser) parse_function_parameters() []*ast.Identifier {
func (l_parser *Parser) parse_call_expression(function ast.Expression) ast.Expression {
// defer untrace(trace("parse_call_expression"))
expression := &ast.CallExpression{Token: l_parser.current_token, Function: function}
expression.Arguments = l_parser.parse_expression_list(token.RPAREN)
expression.Arguments = l_parser.parse_call_arguments()
return expression
}
@@ -440,53 +431,3 @@ func (l_parser *Parser) parse_call_arguments() []ast.Expression {
}
return args
}
func (l_parser *Parser) parse_string_literal() ast.Expression {
return &ast.StringLiteral{
Token: l_parser.current_token,
Value: l_parser.current_token.Literal,
}
}
// Array
func (l_parser *Parser) parse_array_literal() ast.Expression {
array := &ast.ArrayLiteral{Token: l_parser.current_token}
array.Elements = l_parser.parse_expression_list(token.RBRACKET)
return array
}
func (l_parser *Parser) parse_expression_list(end token.TokenType) []ast.Expression {
list := []ast.Expression{}
if l_parser.peek_token_is(end) {
l_parser.next_token()
return list
}
l_parser.next_token()
list = append(list, l_parser.parse_expression(LOWEST))
for l_parser.peek_token_is(token.COMMA) {
l_parser.next_token()
l_parser.next_token()
list = append(list, l_parser.parse_expression(LOWEST))
}
if !l_parser.expect_peek(end) {
return nil
}
return list
}
func (l_parser *Parser) parse_index_expression(left ast.Expression) ast.Expression {
expression := &ast.IndexExpression{Token: l_parser.current_token, Left: left}
l_parser.next_token()
expression.Index = l_parser.parse_expression(LOWEST)
if !l_parser.expect_peek(token.RBRACKET) {
return nil
}
return expression
}

View File

@@ -322,14 +322,6 @@ func TestOperatorPrecedenceParsing(l_test *testing.T) {
"add(a + b + c * d / f + g)",
"add((((a + b) + ((c * d) / f)) + g))",
},
{
"a * [1, 2, 3, 4][b * c] * d",
"((a * ([1, 2, 3, 4][(b * c)])) * d)",
},
{
"add(a * b[2], b[1], 2 * [1, 2][1])",
"add((a * (b[2])), (b[1]), (2 * ([1, 2][1])))",
},
}
for _, tt := range tests {
l_lexer := lexer.New(tt.input)
@@ -666,72 +658,6 @@ func TestLetStatements(l_test *testing.T) {
}
}
func TestStringLiteralExpression(l_test *testing.T) {
input := `"hello world";`
l_lexer := lexer.New(input)
l_parser := New(l_lexer)
program := l_parser.ParseProgram()
check_parser_errors(l_test, l_parser)
statement := program.Statements[0].(*ast.ExpressionStatement)
literal, ok := statement.Expression.(*ast.StringLiteral)
if !ok {
l_test.Fatalf("expression not *ast.StringLiteral, got=%T", statement.Expression)
}
if literal.Value != "hello world" {
l_test.Errorf("literal.Value not %q, got=%q", "hello world", literal.Value)
}
}
func TestParsingArrayLiterals(l_test *testing.T) {
input := "[1, 2 * 2, 3 + 3]"
l_lexer := lexer.New(input)
l_parser := New(l_lexer)
program := l_parser.ParseProgram()
check_parser_errors(l_test, l_parser)
statement, ok := program.Statements[0].(*ast.ExpressionStatement)
array, ok := statement.Expression.(*ast.ArrayLiteral)
if !ok {
l_test.Fatalf("expression is not ast.ArrayLiteral, got=%T", statement.Expression)
}
if len(array.Elements) != 3 {
l_test.Fatalf("len(array.Elements) not 3, got=%d", len(array.Elements))
}
testIntegerLiteral(l_test, array.Elements[0], 1)
testInfixExpression(l_test, array.Elements[1], 2, "*", 2)
testInfixExpression(l_test, array.Elements[2], 3, "+", 3)
}
func TestParsingIndexExpressions(l_test *testing.T) {
input := "my_array[1+1]"
l_lexer := lexer.New(input)
l_parser := New(l_lexer)
program := l_parser.ParseProgram()
check_parser_errors(l_test, l_parser)
statement, ok := program.Statements[0].(*ast.ExpressionStatement)
index_expression, ok := statement.Expression.(*ast.IndexExpression)
if !ok {
l_test.Fatalf("expression is not *ast.IndexExpression, got=%T", statement.Expression)
}
if !testIdentifier(l_test, index_expression.Left, "my_array") {
return
}
if !testInfixExpression(l_test, index_expression.Index, 1, "+", 1) {
return
}
}
// Helpers
func check_parser_errors(l_test *testing.T, l_parser *Parser) {

View File

@@ -13,9 +13,8 @@ const (
COMMENT = "COMMENT" // TODO(tijani): Implement this!!
// Identifiers and basic type literals
IDENT = "IDENT"
INT = "INT"
STRING = "STRING"
IDENT = "IDENT"
INT = "INT"
// Operators
ASSIGN = "="
@@ -38,9 +37,6 @@ const (
LBRACE = "{"
RBRACE = "}"
LBRACKET = "["
RBRACKET = "]"
// Keywords
FUNCTION = "FUNCTION"
LET = "LET"