Compare commits


10 Commits

Author SHA1 Message Date
d9d8a27568 puts()
Can now print out to STDOUT. puts() returns NULL; its only business is to print.

git-svn-id: https://svn.tlawal.org/svn/monkey@66 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
2023-08-14 22:39:08 +00:00
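
Purely as a hedged illustration of the behaviour described above, assuming the evaluator package's test_eval helper and NULL singleton shown in the diffs below (the test name is hypothetical):

package evaluator

import "testing"

// puts() writes each argument to STDOUT and evaluates to NULL.
func TestPutsReturnsNull(l_test *testing.T) {
	evaluated := test_eval(`puts("Hello World!")`)
	if evaluated != NULL {
		l_test.Errorf("puts did not return NULL, got=%T (%+v)", evaluated, evaluated)
	}
}
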
831b333a74 Built-in Functions: len()
Will take in a string and return the length of said string. This function accepts exactly one
argument, and it only accepts strings; any other type will result in an error.

git-svn-id: https://svn.tlawal.org/svn/monkey@65 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
2023-08-14 19:40:04 +00:00
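
As with puts() above, a short hypothetical sketch of this contract, assuming the test_eval and test_integer_object helpers from the evaluator tests shown further down:

package evaluator

import (
	"monkey/object"
	"testing"
)

// len() takes exactly one string and returns its length; anything else yields an *object.Error.
func TestLenContract(l_test *testing.T) {
	test_integer_object(l_test, test_eval(`len("monkey")`), 6)

	evaluated := test_eval(`len(1)`)
	if _, ok := evaluated.(*object.Error); !ok {
		l_test.Errorf("expected *object.Error, got=%T (%+v)", evaluated, evaluated)
	}
}
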
64e048dfcd String concatenation now a thing.
git-svn-id: https://svn.tlawal.org/svn/monkey@64 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
2023-05-09 23:57:45 +00:00
ec6c1ce5aa Now supports strings.
git-svn-id: https://svn.tlawal.org/svn/monkey@63 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
2023-05-09 19:59:18 +00:00
4f3ef7d314 Interpreter complete.
git-svn-id: https://svn.tlawal.org/svn/monkey@62 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
2023-05-05 19:15:39 +00:00
a3b39f1988 git-svn-id: https://svn.tlawal.org/svn/monkey@61 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
2023-05-05 16:12:19 +00:00
ef88c389f0 Adding support for arrays. Tests do not pass; need to fix.
git-svn-id: https://svn.tlawal.org/svn/monkey@60 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
2022-11-24 20:02:19 +00:00
40de215f70 Built-in functions:
len() added

git-svn-id: https://svn.tlawal.org/svn/monkey@59 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
2022-11-11 20:30:15 +00:00
b028741d75 - String concatenation is now possible.
- Added a test to ensure only '+' will work for concatenating strings.
- A little reformatting.

git-svn-id: https://svn.tlawal.org/svn/monkey@58 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
2022-11-03 15:01:40 +00:00
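
A quick hypothetical sketch of that rule, under the same helper assumptions as the sketches above: '+' concatenates, and any other operator on two strings reports an error:

package evaluator

import (
	"monkey/object"
	"testing"
)

// Only '+' is defined for strings; everything else is an unknown-operator error.
func TestOnlyPlusConcatenatesStrings(l_test *testing.T) {
	str, ok := test_eval(`"foo" + "bar"`).(*object.String)
	if !ok || str.Value != "foobar" {
		l_test.Errorf("concatenation failed, got=%+v", str)
	}

	err, ok := test_eval(`"foo" - "bar"`).(*object.Error)
	if !ok || err.Message != "unknown operator: STRING - STRING" {
		l_test.Errorf("expected unknown operator error, got=%+v", err)
	}
}
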
2c2c936b29 String implementation is done. Still has TODOs that would make for good exercises.
git-svn-id: https://svn.tlawal.org/svn/monkey@57 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
2022-11-03 14:23:07 +00:00
10 changed files with 162 additions and 37 deletions

View File

@ -287,7 +287,7 @@ func (ce *CallExpression) String() string {
return out.String()
}
// String Literal
// String
type StringLiteral struct {
Token token.Token
Value string

evaluator/builtins.go (new file, +31 lines)
View File

@ -0,0 +1,31 @@
package evaluator
import (
	"fmt"
	"monkey/object"
)
var builtins = map[string]*object.Builtin{
	"len": &object.Builtin{
		Fn: func(args ...object.Object) object.Object {
			if len(args) != 1 {
				return new_error("wrong number of arguments, got=%d, want=1", len(args))
			}
			switch arg := args[0].(type) {
			case *object.String:
				return &object.Integer{Value: int64(len(arg.Value))}
			default:
				return new_error("argument to `len` not supported, got %s", args[0].Type())
			}
		},
	},
	"puts": &object.Builtin{
		Fn: func(args ...object.Object) object.Object {
			for _, arg := range args {
				fmt.Println(arg.Inspect())
			}
			return NULL // puts only prints what is passed to it; it does not produce a usable return value.
		},
	},
}

View File

@ -85,6 +85,9 @@ func Eval(node ast.Node, env *object.Environment) object.Object {
return args[0]
}
return apply_function(function, args)
case *ast.StringLiteral:
return &object.String{Value: node.Value}
}
return nil
@ -108,14 +111,19 @@ func eval_program(program *ast.Program, env *object.Environment) object.Object {
}
func apply_function(fn object.Object, args []object.Object) object.Object {
function, ok := fn.(*object.Function)
if !ok {
return new_error("not a function: %s", fn.Type())
switch fn := fn.(type) {
case *object.Function:
extended_env := extend_function_env(fn, args)
evaluated := Eval(fn.Body, extended_env)
return unwrap_return_value(evaluated)
case *object.Builtin:
return fn.Fn(args...)
default:
return new_error("not a funciton: %s", fn.Type())
}
extended_env := extend_function_env(function, args)
evaluated := Eval(function.Body, extended_env)
return unwrap_return_value(evaluated)
}
func extend_function_env(fn *object.Function, args []object.Object) *object.Environment {
@ -148,11 +156,14 @@ func eval_expression(expressions []ast.Expression, env *object.Environment) []ob
}
func eval_identifier(node *ast.Identifier, env *object.Environment) object.Object {
val, ok := env.Get(node.Value)
if !ok {
return new_error("identifier not found: " + node.Value)
}
if val, ok := env.Get(node.Value); ok {
return val
}
if builtin, ok := builtins[node.Value]; ok {
return builtin
}
return new_error("identifier not found: " + node.Value)
}
func eval_block_statement(block *ast.BlockStatement, env *object.Environment) object.Object {
@ -228,6 +239,9 @@ func eval_infix_expression(operator string, left object.Object, right object.Obj
case left.Type() != right.Type():
return new_error("type mismatch: %s %s %s", left.Type(), operator, right.Type())
case left.Type() == object.STRING_OBJECT && right.Type() == object.STRING_OBJECT:
return eval_string_infix_expression(operator, left, right)
default:
return new_error("unknown operator: %s %s %s", left.Type(), operator, right.Type())
}
@ -283,6 +297,16 @@ func eval_if_expression(ie *ast.IfExpression, env *object.Environment) object.Ob
}
}
func eval_string_infix_expression(operator string, left, right object.Object) object.Object {
if operator != "+" {
return new_error("unknown operator: %s %s %s", left.Type(), operator, right.Type())
}
left_value := left.(*object.String).Value
right_value := right.(*object.String).Value
return &object.String{Value: left_value + right_value}
}
func is_truthy(object object.Object) bool {
switch object {
case NULL:

View File

@ -56,6 +56,10 @@ func TestErrorHandling(l_test *testing.T) {
}
`, "unknown operator: BOOLEAN + BOOLEAN"},
{"foobar", "identifier not found: foobar"},
{
`"Hello" - "World"`,
"unknown operator: STRING - STRING",
},
}
for _, tt := range tests {
@ -237,6 +241,66 @@ func TestClosures(l_test *testing.T) {
test_integer_object(l_test, test_eval(input), 4)
}
func TestStringLiteral(l_test *testing.T) {
input := `"Hello World!";`
evaluated := test_eval(input)
string, ok := evaluated.(*object.String)
if !ok {
l_test.Fatalf("object is not String, got =%T (%+v)", evaluated, evaluated)
}
if string.Value != "Hello World!" {
l_test.Errorf("String has wrong value, got=%q", string.Value)
}
}
func TestStringConcatenation(l_test *testing.T) {
input := `"Hello" + " " + "World!"`
evaluated := test_eval(input)
string, ok := evaluated.(*object.String)
if !ok {
l_test.Fatalf("object is not String. got=%T (%+v)", evaluated, evaluated)
}
if string.Value != "Hello World!" {
l_test.Errorf("String has wrong value. got=%q", string.Value)
}
}
func TestBuiltinFunction(l_test *testing.T) {
tests := []struct {
input string
expected interface{}
}{
{`len("")`, 0},
{`len("four")`, 4},
{`len("hello world")`, 11},
{`len(1)`, "argument to `len` not supported, got INTEGER"},
{`len("one", "two")`, "wrong number of arguments, got=2, want=1"},
}
for _, tt := range tests {
evaluated := test_eval(tt.input)
switch expected := tt.expected.(type) {
case int:
test_integer_object(l_test, evaluated, int64(expected))
case string:
error_object, ok := evaluated.(*object.Error)
if !ok {
l_test.Errorf("object is not Error, got=%T (%+v)", evaluated, evaluated)
continue
}
if error_object.Message != expected {
l_test.Errorf("wrong error message, expected=%q, got=%q", expected, error_object.Message)
}
}
}
}
// Helpers
func test_eval(input string) object.Object {
l_lexer := lexer.New(input)

View File

@ -63,8 +63,9 @@ func (l_lexer *Lexer) NextToken() token.Token {
case '>':
tok = new_token(token.GT, l_lexer.current_char)
case '"':
tok.Type = token.STRING
tok.Literal = l_lexer.read_string()
tok.Type = token.STRING
case 0:
tok.Literal = ""
tok.Type = token.EOF
@ -138,11 +139,6 @@ func is_digit(ch byte) bool {
return '0' <= ch && ch <= '9'
}
/*
Read the current character until it encounters a closing '"' or end of input.
TODO: some additional thing that can be done at the lexer level with strings is to report an error when it
reaches the end of input without proper termination. Support for character escaping would be really neat.
*/
func (l_lexer *Lexer) read_string() string {
position := l_lexer.position + 1
for {
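
The comment removed just above left two TODOs for read_string: report an error on an unterminated string, and support character escaping. Purely as a hedged sketch of one possible direction (it assumes a read_char helper and the lexer's current_char field, needs "fmt" imported, and changes the signature so NextToken would have to handle the returned error; none of this is part of the change-set):

// Hypothetical sketch only, not in this diff.
func (l_lexer *Lexer) read_string() (string, error) {
	var out []byte
	for {
		l_lexer.read_char()
		switch l_lexer.current_char {
		case '"':
			return string(out), nil // properly terminated
		case 0:
			return "", fmt.Errorf("unterminated string literal") // hit end of input first
		case '\\':
			l_lexer.read_char() // consume the backslash, then translate the escape
			switch l_lexer.current_char {
			case 'n':
				out = append(out, '\n')
			case 't':
				out = append(out, '\t')
			default:
				out = append(out, l_lexer.current_char) // e.g. \" and \\ pass through literally
			}
		default:
			out = append(out, l_lexer.current_char)
		}
	}
}
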

View File

@ -26,7 +26,7 @@ func TestNextToken(t *testing.T) {
10 == 10;
10 != 9;
"foobar"
"foo babr"
"foo bar"
`
tests := []struct {
expectedType token.TokenType
@ -114,8 +114,7 @@ func TestNextToken(t *testing.T) {
{token.SEMICOLON, ";"},
{token.STRING, "foobar"},
{token.STRING, "foo babr"},
{token.STRING, "foo bar"},
{token.EOF, ""},
}

View File

@ -17,6 +17,7 @@ const (
ERROR_OBJECT = "ERROR"
FUNCTION_OBJECT = "FUNCTION"
STRING_OBJECT = "STRING"
BUILTIN_OBJ = "BUILTIN"
)
type Object interface {
@ -109,4 +110,14 @@ type String struct {
}
func (s *String) Type() ObjectType { return STRING_OBJECT }
func (s *String) Inspec() string { return s.Value }
func (s *String) Inspect() string { return s.Value }
// Built-in functions
type BuiltinFunction func(args ...Object) Object
type Builtin struct {
Fn BuiltinFunction
}
func (b *Builtin) Type() ObjectType { return BUILTIN_OBJ }
func (b *Builtin) Inspect() string { return "Builtin Function" }

View File

@ -85,6 +85,7 @@ func New(l_lexer *lexer.Lexer) *Parser {
l_parser.register_prefix(token.INT, l_parser.parse_integer_literal)
l_parser.register_prefix(token.BANG, l_parser.parse_prefix_expression)
l_parser.register_prefix(token.MINUS, l_parser.parse_prefix_expression)
l_parser.register_prefix(token.STRING, l_parser.parse_string_literal)
// Infix Operation
l_parser.infix_parse_functions = make(map[token.TokenType]infix_parse_function)
@ -113,9 +114,6 @@ func New(l_lexer *lexer.Lexer) *Parser {
// Call Expression
l_parser.register_infix(token.LPAREN, l_parser.parse_call_expression)
// String
l_parser.register_prefix(token.STRING, l_parser.parse_string_literal)
return l_parser
}
@ -288,6 +286,13 @@ func (l_parser *Parser) parse_infix_expression(left ast.Expression) ast.Expressi
return expression
}
func (l_parser *Parser) parse_string_literal() ast.Expression {
return &ast.StringLiteral{
Token: l_parser.current_token,
Value: l_parser.current_token.Literal,
}
}
func (l_parser *Parser) no_prefix_parse_function_error(l_token_type token.TokenType) {
message := fmt.Sprintf("no prefix parse function for %s, found", l_token_type)
l_parser.errors = append(l_parser.errors, message)
@ -434,10 +439,3 @@ func (l_parser *Parser) parse_call_arguments() []ast.Expression {
}
return args
}
func (l_parser *Parser) parse_string_literal() ast.Expression {
return &ast.StringLiteral{
Token: l_parser.current_token,
Value: l_parser.current_token.Literal,
}
}

View File

@ -659,7 +659,8 @@ func TestLetStatements(l_test *testing.T) {
}
func TestStringLiteralExpression(l_test *testing.T) {
input := `"hello world";`
input := `"Hello world";`
l_lexer := lexer.New(input)
l_parser := New(l_lexer)
program := l_parser.ParseProgram()
@ -671,8 +672,8 @@ func TestStringLiteralExpression(l_test *testing.T) {
l_test.Fatalf("expression not *ast.StringLiteral, got=%T", statement.Expression)
}
if literal.Value != "hello world" {
l_test.Errorf("literal.Value not %q, got=%q", "hello world", literal.Value)
if literal.Value != "Hello world" {
l_test.Errorf("literal.Value not %q, got=%q", "Hello world", literal.Value)
}
}

View File

@ -15,7 +15,6 @@ const (
// Identifiers and basic type literals
IDENT = "IDENT"
INT = "INT"
STRING = "STRING"
// Operators
ASSIGN = "="
@ -46,6 +45,8 @@ const (
TRUE = "TRUE"
FALSE = "FALSE"
RETURN = "RETURN"
STRING = "STRING"
)
var keywords = map[string]TokenType{