diff --git a/lexer/token.go b/lexer/token.go
new file mode 100644
index 0000000..fb42455
--- /dev/null
+++ b/lexer/token.go
@@ -0,0 +1,111 @@
+package lexer
+
+import (
+	"errors"
+	"fmt"
+	"io"
+)
+
+var (
+	EOF = io.EOF // EOF marker
+)
+
+// Token type
+type TokenType int
+
+const (
+	TokenError TokenType = iota
+)
+
+// Token Position
+type TokenPosition struct {
+	Line int
+	Row  int
+}
+
+// Token
+type Token interface {
+	Type() TokenType
+	String() string
+	Position() TokenPosition
+}
+
+type token struct {
+	typ TokenType
+	pos TokenPosition
+	val string
+}
+
+func NewToken(typ TokenType, val []rune, pos TokenPosition) Token {
+	return &token{
+		typ: typ,
+		val: string(val),
+		pos: pos,
+	}
+}
+
+func (t token) Type() TokenType {
+	return t.typ
+}
+
+func (t token) Position() TokenPosition {
+	return t.pos
+}
+
+func (t token) String() string {
+	return t.val
+}
+
+// ErrorToken
+type ErrorToken interface {
+	Token
+	Error() string
+	Unwrap() error
+}
+
+type errorToken struct {
+	token
+	err error
+}
+
+func NewErrorToken(err error, pos TokenPosition) ErrorToken {
+	return &errorToken{
+		token: token{
+			typ: TokenError,
+			val: err.Error(),
+			pos: pos,
+		},
+		err: err,
+	}
+}
+
+func (t errorToken) Error() string {
+	return t.err.Error()
+}
+
+func (t errorToken) Unwrap() error {
+	return t.err
+}
+
+// SyntaxErrorToken
+type SyntaxErrorToken struct {
+	ErrorToken
+
+	Cursor int
+	Buffer string
+}
+
+func NewSyntaxErrorToken(msg string, pos TokenPosition, cur int, buffer []rune) *SyntaxErrorToken {
+	s := fmt.Sprintf("Syntax Error at %v.%v+%v", pos.Line, pos.Row, cur)
+
+	if len(msg) > 0 {
+		s = fmt.Sprintf("%s: %s", s, msg)
+	}
+
+	return &SyntaxErrorToken{
+		ErrorToken: NewErrorToken(errors.New(s), pos),
+
+		Cursor: cur,
+		Buffer: string(buffer),
+	}
+}
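
Usage sketch (not part of the diff): a minimal example of how these token types might be consumed by caller code. The example.com/project import path and the TokenType(1) value are placeholders, since this file only defines TokenError; the positions and messages are arbitrary.

package main

import (
	"errors"
	"fmt"

	"example.com/project/lexer" // placeholder import path; adjust to the real module
)

func main() {
	pos := lexer.TokenPosition{Line: 3, Row: 7}

	// A plain token built from the runes the lexer consumed.
	// TokenType(1) is hypothetical; only TokenError exists in this file.
	ident := lexer.NewToken(lexer.TokenType(1), []rune("let"), pos)
	fmt.Println(ident.String(), ident.Position().Line) // let 3

	// Wrap an arbitrary error into an ErrorToken; Unwrap keeps the chain intact,
	// so errors.Is can see through it.
	base := errors.New("unexpected rune")
	errTok := lexer.NewErrorToken(base, pos)
	fmt.Println(errTok.Type() == lexer.TokenError) // true
	fmt.Println(errors.Is(errTok, base))           // true, via Unwrap

	// SyntaxErrorToken embeds ErrorToken and additionally carries the cursor
	// offset and the buffered runes at the point of failure.
	synTok := lexer.NewSyntaxErrorToken("unterminated string", pos, 12, []rune(`"abc`))
	fmt.Println(synTok.Error()) // Syntax Error at 3.7+12: unterminated string
	fmt.Println(synTok.Buffer)  // "abc
}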