Compare commits


No commits in common. "d9d8a275681f764d0cc0abbe2beafca19ce3ddfa" and "2791a27f1fb10013d46e83f93d7f80f23f488372" have entirely different histories.

10 changed files with 37 additions and 162 deletions

View File

@@ -287,7 +287,7 @@ func (ce *CallExpression) String() string {
return out.String()
}
// String
// String Literal
type StringLiteral struct {
Token token.Token
Value string

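For reference, the hunk above cuts off at the struct's fields; the rest of the StringLiteral node is not shown in this diff. A minimal sketch of the remaining methods, assuming the names follow the book's ast package (the unexported marker method in particular may be spelled differently in this repo):

// Sketch only -- method names assumed from the book's ast package, not taken from this diff.
func (sl *StringLiteral) expressionNode()      {}
func (sl *StringLiteral) TokenLiteral() string { return sl.Token.Literal }
func (sl *StringLiteral) String() string       { return sl.Token.Literal }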
View File

@@ -1,31 +0,0 @@
package evaluator
import (
"fmt"
"monkey/object"
)
var builtins = map[string]*object.Builtin{
"len": &object.Builtin{
Fn: func(args ...object.Object) object.Object {
if len(args) != 1 {
return new_error("wrong number of arguments, got=%d, want=1", len(args))
}
switch arg := args[0].(type) {
case *object.String:
return &object.Integer{Value: int64(len(arg.Value))}
default:
return new_error("argument to `len` not supported, got %s", args[0].Type())
}
},
},
"puts": &object.Builtin {
Fn: func(args ...object.Object) object.Object {
for _, arg := range args {
fmt.Println(arg.Inspect())
}
return NULL // puts only prints the values passed to it; it does not produce a value of its own, so it returns NULL.
},
},
}

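A quick note on the builtins map above, since it is the extension point for all built-in functions: adding another one is just another entry whose Fn closure validates its arguments and returns an object.Object (or an error object via new_error). A rough sketch under those assumptions; the `reverse` built-in below is purely hypothetical and is not part of either commit:

// Hypothetical example only -- not present in either commit. Relies on the
// same object package and new_error helper used by the entries above.
"reverse": &object.Builtin{
	Fn: func(args ...object.Object) object.Object {
		if len(args) != 1 {
			return new_error("wrong number of arguments, got=%d, want=1", len(args))
		}
		str, ok := args[0].(*object.String)
		if !ok {
			return new_error("argument to `reverse` not supported, got %s", args[0].Type())
		}
		// Reverse the string rune by rune so multi-byte characters survive.
		runes := []rune(str.Value)
		for i, j := 0, len(runes)-1; i < j; i, j = i+1, j-1 {
			runes[i], runes[j] = runes[j], runes[i]
		}
		return &object.String{Value: string(runes)}
	},
},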
View File

@@ -85,9 +85,6 @@ func Eval(node ast.Node, env *object.Environment) object.Object {
return args[0]
}
return apply_function(function, args)
case *ast.StringLiteral:
return &object.String{Value: node.Value}
}
return nil
@@ -111,19 +108,14 @@ func eval_program(program *ast.Program, env *object.Environment) object.Object {
}
func apply_function(fn object.Object, args []object.Object) object.Object {
switch fn := fn.(type) {
case *object.Function:
extended_env := extend_function_env(fn, args)
evaluated := Eval(fn.Body, extended_env)
return unwrap_return_value(evaluated)
case *object.Builtin:
return fn.Fn(args...)
default:
return new_error("not a funciton: %s", fn.Type())
function, ok := fn.(*object.Function)
if !ok {
return new_error("not a function: %s", fn.Type())
}
extended_env := extend_function_env(function, args)
evaluated := Eval(function.Body, extended_env)
return unwrap_return_value(evaluated)
}
func extend_function_env(fn *object.Function, args []object.Object) *object.Environment {
@@ -156,14 +148,11 @@ func eval_expression(expressions []ast.Expression, env *object.Environment) []ob
}
func eval_identifier(node *ast.Identifier, env *object.Environment) object.Object {
if val, ok := env.Get(node.Value); ok {
return val
val, ok := env.Get(node.Value)
if !ok {
return new_error("identifier not found: " + node.Value)
}
if builtin, ok := builtins[node.Value]; ok {
return builtin
}
return new_error("identifier not found: " + node.Value)
return val
}
func eval_block_statement(block *ast.BlockStatement, env *object.Environment) object.Object {
@@ -239,9 +228,6 @@ func eval_infix_expression(operator string, left object.Object, right object.Obj
case left.Type() != right.Type():
return new_error("type mismatch: %s %s %s", left.Type(), operator, right.Type())
case left.Type() == object.STRING_OBJECT && right.Type() == object.STRING_OBJECT:
return eval_string_infix_expression(operator, left, right)
default:
return new_error("unknown operator: %s %s %s", left.Type(), operator, right.Type())
}
@@ -297,16 +283,6 @@ func eval_if_expression(ie *ast.IfExpression, env *object.Environment) object.Ob
}
}
func eval_string_infix_expression(operator string, left, right object.Object) object.Object {
if operator != "+" {
return new_error("unknown operator: %s %s %s", left.Type(), operator, right.Type())
}
left_value := left.(*object.String).Value
right_value := right.(*object.String).Value
return &object.String{Value: left_value + right_value}
}
func is_truthy(object object.Object) bool {
switch object {
case NULL:

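One observation on the removed eval_string_infix_expression above: it only handles "+", so string comparisons fall through to the unknown-operator error. If string support were kept, equality could be handled in the same place. A minimal sketch under that assumption; the function below is hypothetical, and native_bool_to_boolean_object is a guess at this repo's snake_case spelling of the book's boolean helper:

// Hypothetical extension -- not part of either commit.
func eval_string_comparison(operator string, left, right object.Object) object.Object {
	left_value := left.(*object.String).Value
	right_value := right.(*object.String).Value
	switch operator {
	case "==":
		return native_bool_to_boolean_object(left_value == right_value)
	case "!=":
		return native_bool_to_boolean_object(left_value != right_value)
	default:
		return new_error("unknown operator: %s %s %s", left.Type(), operator, right.Type())
	}
}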
View File

@@ -56,10 +56,6 @@ func TestErrorHandling(l_test *testing.T) {
}
`, "unknown operator: BOOLEAN + BOOLEAN"},
{"foobar", "identifier not found: foobar"},
{
`"Hello" - "World"`,
"unknown operator: STRING - STRING",
},
}
for _, tt := range tests {
@@ -241,66 +237,6 @@ func TestClosures(l_test *testing.T) {
test_integer_object(l_test, test_eval(input), 4)
}
func TestStringLiteral(l_test *testing.T) {
input := `"Hello World!";`
evaluated := test_eval(input)
string, ok := evaluated.(*object.String)
if !ok {
l_test.Fatalf("object is not String, got =%T (%+v)", evaluated, evaluated)
}
if string.Value != "Hello World!" {
l_test.Errorf("String has wrong value, got=%q", string.Value)
}
}
func TestStringConcatenation(l_test *testing.T) {
input := `"Hello" + " " + "World!"`
evaluated := test_eval(input)
string, ok := evaluated.(*object.String)
if !ok {
l_test.Fatalf("object is not String. got=%T (%+v)", evaluated, evaluated)
}
if string.Value != "Hello World!" {
l_test.Errorf("String has wrong value. got=%q", string.Value)
}
}
func TestBuiltinFunction(l_test *testing.T) {
tests := []struct {
input string
expected interface{}
}{
{`len("")`, 0},
{`len("four")`, 4},
{`len("hello world")`, 11},
{`len(1)`, "argument to `len` not supported, got INTEGER"},
{`len("one", "two")`, "wrong number of arguments, got=2, want=1"},
}
for _, tt := range tests {
evaluated := test_eval(tt.input)
switch expected := tt.expected.(type) {
case int:
test_integer_object(l_test, evaluated, int64(expected))
case string:
error_object, ok := evaluated.(*object.Error)
if !ok {
l_test.Errorf("object is not Error, got=%T (%+v)", evaluated, evaluated)
continue
}
if error_object.Message != expected {
l_test.Errorf("wrong error message, expected=%q, got=%q", expected, error_object.Message)
}
}
}
}
// Helpers
func test_eval(input string) object.Object {
l_lexer := lexer.New(input)

View File

@@ -63,9 +63,8 @@ func (l_lexer *Lexer) NextToken() token.Token {
case '>':
tok = new_token(token.GT, l_lexer.current_char)
case '"':
tok.Literal = l_lexer.read_string()
tok.Type = token.STRING
tok.Literal = l_lexer.read_string()
case 0:
tok.Literal = ""
tok.Type = token.EOF
@@ -139,6 +138,11 @@ func is_digit(ch byte) bool {
return '0' <= ch && ch <= '9'
}
/*
Reads characters until it encounters a closing '"' or the end of input.
TODO: the lexer could additionally report an error when it reaches the end of input without a proper
closing quote. Support for character escaping would also be really neat. (A rough sketch of both ideas
follows after this hunk.)
*/
func (l_lexer *Lexer) read_string() string {
position := l_lexer.position + 1
for {

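The two TODOs in the comment above (unterminated-string errors and escape sequences) could be addressed with a variant of read_string along these lines. Everything here is a sketch, not part of either commit: the method name is made up, the (string, error) return changes the current signature, read_char/current_char/input are assumed from the rest of this lexer, and the caller in NextToken would need to surface the error.

// Rough sketch only -- hypothetical replacement for read_string.
// Requires importing "errors".
func (l_lexer *Lexer) read_string_with_escapes() (string, error) {
	var out []byte
	for {
		l_lexer.read_char()
		switch l_lexer.current_char {
		case '"':
			// Properly terminated string.
			return string(out), nil
		case 0:
			// Reached end of input before the closing quote.
			return string(out), errors.New("unterminated string literal")
		case '\\':
			// Minimal escape handling: \n, \t, and a fallback that keeps the
			// escaped character as-is (covers \" and \\).
			l_lexer.read_char()
			switch l_lexer.current_char {
			case 'n':
				out = append(out, '\n')
			case 't':
				out = append(out, '\t')
			default:
				out = append(out, l_lexer.current_char)
			}
		default:
			out = append(out, l_lexer.current_char)
		}
	}
}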
View File

@@ -26,7 +26,7 @@ func TestNextToken(t *testing.T) {
10 == 10;
10 != 9;
"foobar"
"foo bar"
"foo babr"
`
tests := []struct {
expectedType token.TokenType
@@ -114,7 +114,8 @@ func TestNextToken(t *testing.T) {
{token.SEMICOLON, ";"},
{token.STRING, "foobar"},
{token.STRING, "foo bar"},
{token.STRING, "foo babr"},
{token.EOF, ""},
}

View File

@@ -17,7 +17,6 @@ const (
ERROR_OBJECT = "ERROR"
FUNCTION_OBJECT = "FUNCTION"
STRING_OBJECT = "STRING"
BUILTIN_OBJ = "BUILTIN"
)
type Object interface {
@@ -110,14 +109,4 @@ type String struct {
}
func (s *String) Type() ObjectType { return STRING_OBJECT }
func (s *String) Inspect() string { return s.Value }
// Built-in functions
type BuiltinFunction func(args ...Object) Object
type Builtin struct {
Fn BuiltinFunction
}
func (b *Builtin) Type() ObjectType { return BUILTIN_OBJ }
func (b *Builtin) Inspect() string { return "Builtin Function" }

View File

@@ -85,7 +85,6 @@ func New(l_lexer *lexer.Lexer) *Parser {
l_parser.register_prefix(token.INT, l_parser.parse_integer_literal)
l_parser.register_prefix(token.BANG, l_parser.parse_prefix_expression)
l_parser.register_prefix(token.MINUS, l_parser.parse_prefix_expression)
l_parser.register_prefix(token.STRING, l_parser.parse_string_literal)
// Infix Operation
l_parser.infix_parse_functions = make(map[token.TokenType]infix_parse_function)
@@ -114,6 +113,9 @@ func New(l_lexer *lexer.Lexer) *Parser {
// Call Expression
l_parser.register_infix(token.LPAREN, l_parser.parse_call_expression)
// String
l_parser.register_prefix(token.STRING, l_parser.parse_string_literal)
return l_parser
}
@@ -286,13 +288,6 @@ func (l_parser *Parser) parse_infix_expression(left ast.Expression) ast.Expressi
return expression
}
func (l_parser *Parser) parse_string_literal() ast.Expression {
return &ast.StringLiteral{
Token: l_parser.current_token,
Value: l_parser.current_token.Literal,
}
}
func (l_parser *Parser) no_prefix_parse_function_error(l_token_type token.TokenType) {
message := fmt.Sprintf("no prefix parse function for %s, found", l_token_type)
l_parser.errors = append(l_parser.errors, message)
@@ -439,3 +434,10 @@ func (l_parser *Parser) parse_call_arguments() []ast.Expression {
}
return args
}
func (l_parser *Parser) parse_string_literal() ast.Expression {
return &ast.StringLiteral{
Token: l_parser.current_token,
Value: l_parser.current_token.Literal,
}
}

View File

@@ -659,8 +659,7 @@ func TestLetStatements(l_test *testing.T) {
}
func TestStringLiteralExpression(l_test *testing.T) {
input := `"Hello world";`
input := `"hello world";`
l_lexer := lexer.New(input)
l_parser := New(l_lexer)
program := l_parser.ParseProgram()
@@ -672,8 +671,8 @@ func TestStringLiteralExpression(l_test *testing.T) {
l_test.Fatalf("expression not *ast.StringLiteral, got=%T", statement.Expression)
}
if literal.Value != "Hello world" {
l_test.Errorf("literal.Value not %q, got=%q", "Hello world", literal.Value)
if literal.Value != "hello world" {
l_test.Errorf("literal.Value not %q, got=%q", "hello world", literal.Value)
}
}

View File

@@ -13,8 +13,9 @@ const (
COMMENT = "COMMENT" // TODO(tijani): Implement this!!
// Identifiers and basic type literals
IDENT = "IDENT"
INT = "INT"
IDENT = "IDENT"
INT = "INT"
STRING = "STRING"
// Operators
ASSIGN = "="
@@ -45,8 +46,6 @@ const (
TRUE = "TRUE"
FALSE = "FALSE"
RETURN = "RETURN"
STRING = "STRING"
)
var keywords = map[string]TokenType{