From bcd7ed3ec9f59f350a29eb95ffbac71345d93e6d Mon Sep 17 00:00:00 2001
From: franck cuny
Date: Sat, 11 Jan 2020 13:53:44 +0100
Subject: lexer: support more operator tokens.

Support the operator tokens that were added to our tokenizer. This also
adds a few more tests to ensure we handle them correctly.
---
 users/fcuny/exp/monkey/pkg/lexer/lexer.go | 13 +++++++++++++
 1 file changed, 13 insertions(+)

(limited to 'users/fcuny/exp/monkey/pkg/lexer/lexer.go')

diff --git a/users/fcuny/exp/monkey/pkg/lexer/lexer.go b/users/fcuny/exp/monkey/pkg/lexer/lexer.go
index fc29371..d538cf5 100644
--- a/users/fcuny/exp/monkey/pkg/lexer/lexer.go
+++ b/users/fcuny/exp/monkey/pkg/lexer/lexer.go
@@ -67,6 +67,19 @@ func (l *Lexer) NextToken() token.Token {
 		tok = newToken(token.ASSIGN, l.ch)
 	case '+':
 		tok = newToken(token.PLUS, l.ch)
+	case '-':
+		tok = newToken(token.MINUS, l.ch)
+	case '!':
+		tok = newToken(token.BANG, l.ch)
+	case '*':
+		tok = newToken(token.ASTERISK, l.ch)
+	case '/':
+		tok = newToken(token.SLASH, l.ch)
+	case '<':
+		tok = newToken(token.LT, l.ch)
+	case '>':
+		tok = newToken(token.GT, l.ch)
+
 	case ';':
 		tok = newToken(token.SEMICOLON, l.ch)
 	case ',':
--
cgit 1.4.1
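
The tests mentioned in the commit message live outside this filtered view. A minimal sketch of what such a test could look like, assuming the conventional Monkey lexer API (a New constructor, a token.TokenType string type, Token values with Type and Literal fields, and a token.COMMA constant) and a hypothetical import path:

package lexer

import (
	"testing"

	"monkey/pkg/token" // assumed import path; adjust to the module's actual path
)

// TestOperatorTokens is a sketch (not the actual test from this commit) that
// feeds every single-character operator to the lexer and checks that each one
// is returned with the expected token type and literal.
func TestOperatorTokens(t *testing.T) {
	input := `=+-!*/<>;,`

	tests := []struct {
		expectedType    token.TokenType
		expectedLiteral string
	}{
		{token.ASSIGN, "="},
		{token.PLUS, "+"},
		{token.MINUS, "-"},
		{token.BANG, "!"},
		{token.ASTERISK, "*"},
		{token.SLASH, "/"},
		{token.LT, "<"},
		{token.GT, ">"},
		{token.SEMICOLON, ";"},
		{token.COMMA, ","},
	}

	l := New(input)
	for i, tt := range tests {
		tok := l.NextToken()
		if tok.Type != tt.expectedType {
			t.Fatalf("tests[%d]: wrong type, expected=%q, got=%q", i, tt.expectedType, tok.Type)
		}
		if tok.Literal != tt.expectedLiteral {
			t.Fatalf("tests[%d]: wrong literal, expected=%q, got=%q", i, tt.expectedLiteral, tok.Literal)
		}
	}
}

A table-driven test like this mirrors the structure of the switch in NextToken, so adding a new operator case in the lexer only requires appending one row to the expectations.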