// influxdb/influxql/token.go

package influxql

import (
	"strings"
)

// Token is a lexical token of the InfluxQL language.
type Token int

const (
	// Special tokens
	ILLEGAL Token = iota
	EOF
	WS

	literal_beg
	// Literals
	IDENT        // main
	NUMBER       // 12345.67
	DURATION_VAL // 13h
	STRING       // "abc"
	BADSTRING    // "abc
	BADESCAPE    // \q
	TRUE         // true
	FALSE        // false
	REGEX        // Regular expressions
	BADREGEX     // `.*
	literal_end

	operator_beg
	// Operators
	ADD // +
	SUB // -
	MUL // *
	DIV // /

	AND // AND
	OR  // OR

	EQ       // =
	NEQ      // !=
	EQREGEX  // =~
	NEQREGEX // !~
	LT       // <
	LTE      // <=
	GT       // >
	GTE      // >=
	operator_end

	LPAREN    // (
	RPAREN    // )
	COMMA     // ,
	COLON     // :
	SEMICOLON // ;
	DOT       // .

	keyword_beg
	// Keywords
	ALL
	ALTER
	AS
	ASC
	BEGIN
	BY
	CREATE
	CONTINUOUS
	DATABASE
	DATABASES
	DEFAULT
	DELETE
	DESC
	DISTINCT
	DROP
	DURATION
	END
	EXISTS
	EXPLAIN
	FIELD
	FOR
	FORCE
	FROM
	GRANT
	GRANTS
	GROUP
	IF
	IN
	INF
	INNER
	INSERT
	INTO
	KEY
	KEYS
	LIMIT
	MEASUREMENT
	MEASUREMENTS
	NOT
	OFFSET
	ON
	ORDER
	PASSWORD
	POLICY
	POLICIES
	PRIVILEGES
	QUERIES
	QUERY
	READ
	RENAME
	REPLICATION
	RETENTION
	REVOKE
	SELECT
	SERIES
	SERVER
	SERVERS
	SET
	SHOW
	SHARDS
	SLIMIT
	STATS
	DIAGNOSTICS
	SOFFSET
	TAG
	TO
	USER
	USERS
	VALUES
	WHERE
	WITH
	WRITE
	keyword_end
)

var tokens = [...]string{
	ILLEGAL: "ILLEGAL",
	EOF: "EOF",
	WS: "WS",

	IDENT: "IDENT",
	NUMBER: "NUMBER",
	DURATION_VAL: "DURATION_VAL",
	STRING: "STRING",
	BADSTRING: "BADSTRING",
	BADESCAPE: "BADESCAPE",
	TRUE: "TRUE",
	FALSE: "FALSE",
	REGEX: "REGEX",

	ADD: "+",
	SUB: "-",
	MUL: "*",
	DIV: "/",
	AND: "AND",
	OR: "OR",
	EQ: "=",
	NEQ: "!=",
	EQREGEX: "=~",
	NEQREGEX: "!~",
	LT: "<",
	LTE: "<=",
	GT: ">",
	GTE: ">=",

	LPAREN: "(",
	RPAREN: ")",
	COMMA: ",",
	COLON: ":",
	SEMICOLON: ";",
	DOT: ".",

	ALL: "ALL",
	ALTER: "ALTER",
	AS: "AS",
	ASC: "ASC",
	BEGIN: "BEGIN",
	BY: "BY",
	CREATE: "CREATE",
	CONTINUOUS: "CONTINUOUS",
	DATABASE: "DATABASE",
	DATABASES: "DATABASES",
	DEFAULT: "DEFAULT",
	DELETE: "DELETE",
	DESC: "DESC",
	DROP: "DROP",
	DISTINCT: "DISTINCT",
	DURATION: "DURATION",
	END: "END",
	EXISTS: "EXISTS",
	EXPLAIN: "EXPLAIN",
	FIELD: "FIELD",
	FOR: "FOR",
	FORCE: "FORCE",
	FROM: "FROM",
	GRANT: "GRANT",
	GRANTS: "GRANTS",
	GROUP: "GROUP",
	IF: "IF",
	IN: "IN",
	INF: "INF",
	INNER: "INNER",
	INSERT: "INSERT",
	INTO: "INTO",
	KEY: "KEY",
	KEYS: "KEYS",
	LIMIT: "LIMIT",
	MEASUREMENT: "MEASUREMENT",
	MEASUREMENTS: "MEASUREMENTS",
	NOT: "NOT",
	OFFSET: "OFFSET",
	ON: "ON",
	ORDER: "ORDER",
	PASSWORD: "PASSWORD",
	POLICY: "POLICY",
	POLICIES: "POLICIES",
	PRIVILEGES: "PRIVILEGES",
	QUERIES: "QUERIES",
	QUERY: "QUERY",
	READ: "READ",
	RENAME: "RENAME",
	REPLICATION: "REPLICATION",
	RETENTION: "RETENTION",
	REVOKE: "REVOKE",
	SELECT: "SELECT",
	SERIES: "SERIES",
	SERVER: "SERVER",
	SERVERS: "SERVERS",
	SET: "SET",
	SHOW: "SHOW",
	SHARDS: "SHARDS",
	SLIMIT: "SLIMIT",
	SOFFSET: "SOFFSET",
	STATS: "STATS",
	DIAGNOSTICS: "DIAGNOSTICS",
	TAG: "TAG",
	TO: "TO",
	USER: "USER",
	USERS: "USERS",
	VALUES: "VALUES",
	WHERE: "WHERE",
	WITH: "WITH",
	WRITE: "WRITE",
}

var keywords map[string]Token

func init() {
	keywords = make(map[string]Token)
	for tok := keyword_beg + 1; tok < keyword_end; tok++ {
		keywords[strings.ToLower(tokens[tok])] = tok
	}
	for _, tok := range []Token{AND, OR} {
		keywords[strings.ToLower(tokens[tok])] = tok
	}
	keywords["true"] = TRUE
	keywords["false"] = FALSE
}

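// Illustrative note (not part of the original file): once init has run, every
// keyword is reachable under its lower-cased spelling, with AND, OR, "true"
// and "false" folded in as well, for example:
//
//	keywords["select"] // SELECT
//	keywords["and"]    // AND
//	keywords["true"]   // TRUE
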
// String returns the string representation of the token.
func (tok Token) String() string {
	if tok >= 0 && tok < Token(len(tokens)) {
		return tokens[tok]
	}
	return ""
}

// Precedence returns the operator precedence of the binary operator token.
func (tok Token) Precedence() int {
	switch tok {
	case OR:
		return 1
	case AND:
		return 2
	case EQ, NEQ, EQREGEX, NEQREGEX, LT, LTE, GT, GTE:
		return 3
	case ADD, SUB:
		return 4
	case MUL, DIV:
		return 5
	}
	return 0
}

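// The sketch below is not part of the original file; it illustrates how a
// parser might drive these precedence levels with precedence climbing. The
// helpers peekTok, nextTok and parseUnary, and the Expr/BinaryExpr shapes,
// are assumptions made for illustration only.
//
//	func parseBinaryExpr(minPrec int) Expr {
//		lhs := parseUnary()
//		for peekTok().isOperator() && peekTok().Precedence() >= minPrec {
//			op := nextTok()
//			rhs := parseBinaryExpr(op.Precedence() + 1)
//			lhs = &BinaryExpr{Op: op, LHS: lhs, RHS: rhs}
//		}
//		return lhs
//	}
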
// isOperator returns true for operator tokens.
func (tok Token) isOperator() bool { return tok > operator_beg && tok < operator_end }

// tokstr returns a literal if provided, otherwise returns the token string.
func tokstr(tok Token, lit string) string {
	if lit != "" {
		return lit
	}
	return tok.String()
}

// Lookup returns the token associated with a given string.
func Lookup(ident string) Token {
	if tok, ok := keywords[strings.ToLower(ident)]; ok {
		return tok
	}
	return IDENT
}

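// For illustration (not in the original file), Lookup is case-insensitive and
// falls back to IDENT for anything that is not a reserved word:
//
//	Lookup("SELECT") // SELECT
//	Lookup("select") // SELECT
//	Lookup("mydb")   // IDENT
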
// Pos specifies the line and character position of a token.
// The Char and Line are both zero-based indexes.
type Pos struct {
	Line int
	Char int
}