chore: Add Makefile

pull/10616/head
Nathaniel Cook 2018-05-22 11:28:04 -06:00
parent ae31810f41
commit c8ff490338
16 changed files with 415 additions and 109 deletions

4
.gitignore vendored
View File

@ -6,4 +6,6 @@ idpdb.bolt
# Project binaries.
/idp
/transpilerd
/idpd
/ifqld
/bin

22
Gopkg.lock generated
View File

@ -244,6 +244,16 @@
packages = ["."]
revision = "bb74f1db0675b241733089d5a1faa5dd8b0ef57b"
[[projects]]
branch = "master"
name = "github.com/mna/pigeon"
packages = [
".",
"ast",
"builder"
]
revision = "2ad051b8b508f69e7dcf8febdea2856ec2db73ed"
[[projects]]
name = "github.com/opentracing/opentracing-go"
packages = [
@ -437,6 +447,16 @@
revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0"
version = "v0.3.0"
[[projects]]
branch = "master"
name = "golang.org/x/tools"
packages = [
"go/ast/astutil",
"imports",
"internal/fastwalk"
]
revision = "28aef64757f4d432485ab970b094e1af8b301e84"
[[projects]]
name = "gopkg.in/yaml.v2"
packages = ["."]
@ -446,6 +466,6 @@
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "7e6c508f4349af14fb56dd5feb04d7f1ac10f0c88caf74f12f115c456b039940"
inputs-digest = "ed7f4f2ab4a6f007ba6377ae04b0405146ea60608de96ef76ab19eef8113e694"
solver-name = "gps-cdcl"
solver-version = 1

View File

@ -25,6 +25,11 @@
# unused-packages = true
required = [
# Pigeon is the command used to generate the IFQL parser from the PEG description
"github.com/mna/pigeon"
]
[[constraint]]
name = "github.com/google/go-cmp"
version = "0.2.0"
@ -33,6 +38,13 @@
name = "github.com/influxdata/influxdb"
branch = "master"
# Pigeon hasn't made official releases for a while, so we need to use master for now.
# We plan to replace pigeon with a hand-written parser, so this dependency is short-lived.
[[override]]
branch = "master"
name = "github.com/mna/pigeon"
# Dependency is pinned to an explicit revision rather than the latest release because
# the latest release pre-dates context, which we need.
[[constraint]]

99
Makefile Normal file
View File

@ -0,0 +1,99 @@
# Top level Makefile for the entire project
#
# This Makefile follows a few conventions:
#
# * All cmds must be added to this top-level Makefile.
# * All binaries are placed in ./bin; it's recommended to add this directory to your PATH.
# * Each package that needs to run go generate must have its own Makefile for that purpose.
# * All recursive Makefiles must support the targets: all and clean.
#
SUBDIRS := query
GO_ARGS=-tags '$(GO_TAGS)'
# These vars can be used by all recursive Makefiles
export GO_BUILD=go build $(GO_ARGS)
export GO_TEST=go test $(GO_ARGS)
export GO_GENERATE=go generate $(GO_ARGS)
# All go source files
SOURCES := $(shell find . -name '*.go' -not -name '*_test.go')
# All go source files excluding the vendored sources.
SOURCES_NO_VENDOR := $(shell find . -path ./vendor -prune -o -name "*.go" -not -name '*_test.go' -print)
# List of binary cmds to build
CMDS := bin/idp bin/idpd bin/ifqld
# List of utilities to build as part of the build process
UTILS := bin/pigeon bin/cmpgen
# Default target to build all commands.
#
# This target sets up the dependencies to correctly build all commands.
# Other targets must depend on this target to correctly build CMDS.
all: Gopkg.lock $(UTILS) subdirs $(CMDS)
# Target to build subdirs.
# Each subdir must support the `all` target.
subdirs: $(SUBDIRS)
$(MAKE) -C $^ all
#
# Define targets for commands
#
bin/ifqld: $(SOURCES)
$(GO_BUILD) -i -o bin/ifqld ./cmd/ifqld
bin/idp: $(SOURCES)
$(GO_BUILD) -i -o bin/idp ./cmd/idp
bin/idpd: $(SOURCES)
$(GO_BUILD) -i -o bin/idpd ./cmd/idpd
#
# Define targets for utilities
#
bin/pigeon: ./vendor/github.com/mna/pigeon/main.go
go build -i -o bin/pigeon ./vendor/github.com/mna/pigeon
bin/cmpgen: ./query/ast/asttest/cmpgen/main.go
go build -i -o bin/cmpgen ./query/ast/asttest/cmpgen
#
# Define how source dependencies are managed
#
Gopkg.lock: Gopkg.toml
dep ensure -v
vendor/github.com/mna/pigeon/main.go: Gopkg.lock
dep ensure -v
#
# Define action only targets
#
fmt: $(SOURCES_NO_VENDOR)
goimports -w $^
test: all
$(GO_TEST) ./...
test-race: all
$(GO_TEST) -race ./...
bench: all
$(GO_TEST) -bench=. -run=^$$ ./...
# Recursively clean all subdirs
clean: $(SUBDIRS)
$(MAKE) -C $^ $(MAKECMDGOALS)
rm -rf bin
# .PHONY targets represent actions that do not create an actual file.
.PHONY: all subdirs $(SUBDIRS) fmt test test-race bench clean

View File

@ -1,66 +1,13 @@
VERSION ?= $(shell git describe --always --tags)
SUBDIRS := ast parser promql
GO_ARGS=-tags '$(GO_TAGS)'
export GO_BUILD=go build $(GO_ARGS)
export GO_TEST=go test $(GO_ARGS)
export GO_GENERATE=go generate $(GO_ARGS)
SOURCES := $(shell find . -name '*.go' -not -name '*_test.go')
SOURCES_NO_VENDOR := $(shell find . -path ./vendor -prune -o -name "*.go" -not -name '*_test.go' -print)
SUBDIRS = ast parser promql
all: Gopkg.lock $(SUBDIRS) bin/platform/query bin/ifqld
subdirs: $(SUBDIRS)
$(SUBDIRS): bin/pigeon bin/cmpgen
$(SUBDIRS):
$(MAKE) -C $@ $(MAKECMDGOALS)
bin/platform/query: $(SOURCES) bin/pigeon bin/cmpgen
$(GO_BUILD) -i -o bin/platform/query ./cmd/ifql
bin/platform/queryd: $(SOURCES) bin/pigeon bin/cmpgen
$(GO_BUILD) -i -o bin/platform/queryd ./cmd/ifqld
bin/pigeon: ./vendor/github.com/mna/pigeon/main.go
go build -i -o bin/pigeon ./vendor/github.com/mna/pigeon
bin/cmpgen: ./ast/asttest/cmpgen/main.go
go build -i -o bin/cmpgen ./ast/asttest/cmpgen
Gopkg.lock: Gopkg.toml
dep ensure -v
vendor/github.com/mna/pigeon/main.go: Gopkg.lock
dep ensure -v
fmt: $(SOURCES_NO_VENDOR)
goimports -w $^
update:
dep ensure -v -update
test: Gopkg.lock bin/platform/query
$(GO_TEST) ./...
test-race: Gopkg.lock bin/platform/query
$(GO_TEST) -race ./...
bench: Gopkg.lock bin/platform/query
$(GO_TEST) -bench=. -run=^$$ ./...
bin/goreleaser:
go build -i -o bin/goreleaser ./vendor/github.com/goreleaser/goreleaser
dist: bin/goreleaser
PATH=./bin:${PATH} goreleaser --rm-dist --release-notes CHANGELOG.md
release: dist release-docker
release-docker:
docker build -t quay.io/influxdb/platform/queryd:latest .
docker tag quay.io/influxdb/platform/queryd:latest quay.io/influxdb/ifqld:${VERSION}
docker push quay.io/influxdb/platform/queryd:latest
docker push quay.io/influxdb/platform/queryd:${VERSION}
all: $(SUBDIRS)
clean: $(SUBDIRS)
rm -rf bin dist
.PHONY: all clean $(SUBDIRS) update test test-race bench release docker dist fmt
.PHONY: all clean subdirs $(SUBDIRS)

View File

@ -1,9 +1,13 @@
SUBDIRS := asttest
SUBDIRS = asttest
subdirs: $(SUBDIRS)
$(SUBDIRS):
$(MAKE) -C $@ $(MAKECMDGOALS)
all: $(SUBDIRS)
clean: $(SUBDIRS)
.PHONY: $(SUBDIRS) clean
.PHONY: all clean subdirs $(SUBDIRS)

View File

@ -1,7 +1,7 @@
all: cmpopts.go
cmpopts.go: ../ast.go gen.go ../../bin/cmpgen
PATH=../../bin:${PATH} $(GO_GENERATE) -x ./...
cmpopts.go: ../ast.go gen.go ../../../bin/cmpgen
PATH=../../../bin:${PATH} $(GO_GENERATE) -x ./...
clean:
rm -f cmpopts.go

View File

@ -3,8 +3,8 @@ package query
import (
"context"
"github.com/influxdata/platform/query/id"
"github.com/influxdata/platform"
"github.com/influxdata/platform/query/id"
)
// FromBucketService wraps a platform.BucketService in the BucketLookup interface.

View File

@ -3,11 +3,11 @@ package functions
import (
"fmt"
"github.com/influxdata/platform/query"
"github.com/influxdata/platform/query/execute"
"github.com/influxdata/platform/query/functions/storage"
"github.com/influxdata/platform/query/id"
"github.com/influxdata/platform/query/interpreter"
"github.com/influxdata/platform/query"
"github.com/influxdata/platform/query/execute"
"github.com/influxdata/platform/query/plan"
"github.com/influxdata/platform/query/semantic"
"github.com/pkg/errors"

View File

@ -6,12 +6,12 @@ import (
"fmt"
"time"
"github.com/influxdata/platform/query/ast"
"github.com/influxdata/platform/query/functions"
"github.com/influxdata/platform/query"
"github.com/influxdata/platform/query/execute"
"github.com/influxdata/platform/query/semantic"
"github.com/influxdata/influxql"
"github.com/influxdata/platform/query"
"github.com/influxdata/platform/query/ast"
"github.com/influxdata/platform/query/execute"
"github.com/influxdata/platform/query/functions"
"github.com/influxdata/platform/query/semantic"
)
// Transpiler converts InfluxQL queries into a query spec.

View File

@ -1,8 +1,7 @@
all: ifql.go
ifql.go: ifql.peg parser.go parser_debug.go ../bin/pigeon
PATH=../bin:${PATH} $(GO_GENERATE) -x ./...
ifql.go: ifql.peg parser.go parser_debug.go ../../bin/pigeon
PATH=../../bin:${PATH} $(GO_GENERATE) -x ./...
clean:
rm -f ifql.go

View File

@ -6961,6 +6961,10 @@ var (
// errNoRule is returned when the grammar to parse has no rule.
errNoRule = errors.New("grammar has no rule")
// errInvalidEntrypoint is returned when the specified entrypoint rule
// does not exist.
errInvalidEntrypoint = errors.New("invalid entrypoint")
// errInvalidEncoding is returned when the source is not properly
// utf8-encoded.
errInvalidEncoding = errors.New("invalid encoding")
@ -6987,6 +6991,38 @@ func MaxExpressions(maxExprCnt uint64) Option {
}
}
// Entrypoint creates an Option to set the rule name to use as the entrypoint.
// The rule name must have been specified with the -alternate-entrypoints option
// if generating the parser with the -optimize-grammar flag; otherwise
// it may have been optimized out. Passing an empty string sets the
// entrypoint to the first rule in the grammar.
//
// The default is to start parsing at the first rule in the grammar.
func Entrypoint(ruleName string) Option {
return func(p *parser) Option {
oldEntrypoint := p.entrypoint
p.entrypoint = ruleName
if ruleName == "" {
p.entrypoint = g.rules[0].name
}
return Entrypoint(oldEntrypoint)
}
}
// AllowInvalidUTF8 creates an Option to allow invalid UTF-8 bytes.
// Every invalid UTF-8 byte is treated as a utf8.RuneError (U+FFFD)
// by character class matchers and is matched by the any matcher.
// The returned matched value, c.text and c.offset are NOT affected.
//
// The default is false.
func AllowInvalidUTF8(b bool) Option {
return func(p *parser) Option {
old := p.allowInvalidUTF8
p.allowInvalidUTF8 = b
return AllowInvalidUTF8(old)
}
}
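// --- Illustrative sketch (not part of this commit): how the two options above
// compose. It assumes this generated file exposes the usual pigeon entry point
// Parse(filename, src, opts...) and lives in a package that exports these
// Options; the helper name below is an assumption for illustration only.
func parseWithOptions(src []byte) (interface{}, error) {
	return Parse("query.ifql", src,
		Entrypoint(""),          // "" keeps the default: start at the grammar's first rule
		AllowInvalidUTF8(false), // keep strict UTF-8 validation (the default)
	)
}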
// Recover creates an Option to set the recover flag to b. When set to
// true, this causes the parser to recover from panics and convert it
// to an error. Setting it to false can be useful while debugging to
@ -7063,10 +7099,16 @@ type current struct {
pos position // start position of the match
text []byte // raw text of the match
// the globalStore allows the parser to store arbitrary values
globalStore map[string]interface{}
// globalStore is a general store for the user to store arbitrary key-value
// pairs that they need to manage and that they do not want tied to the
// backtracking of the parser. This is only modified by the user and never
// rolled back by the parser. It is always up to the user to keep this in a
// consistent state.
globalStore storeDict
}
type storeDict map[string]interface{}
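// --- Illustrative sketch (not part of this commit): seeding globalStore before
// a parse. It assumes the GlobalStore Option that pigeon generates alongside
// this field; the key "maxDepth" and the helper name are illustrative only.
// Values set this way are visible inside grammar actions as c.globalStore[key]
// and are never rolled back on backtracking.
func seedGlobalStore(src []byte) (interface{}, error) {
	return Parse("query.ifql", src, GlobalStore("maxDepth", 100))
}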
// the AST types...
type grammar struct {
@ -7233,11 +7275,13 @@ func newParser(filename string, b []byte, opts ...Option) *parser {
pt: savepoint{position: position{line: 1}},
recover: true,
cur: current{
globalStore: make(map[string]interface{}),
globalStore: make(storeDict),
},
maxFailPos: position{col: 1, line: 1},
maxFailExpected: make([]string, 0, 20),
Stats: &stats,
// start rule is rule [0] unless an alternate entrypoint is specified
entrypoint: g.rules[0].name,
}
p.setOptions(opts)
@ -7310,12 +7354,19 @@ type parser struct {
// max number of expressions to be parsed
maxExprCnt uint64
// entrypoint for the parser
entrypoint string
allowInvalidUTF8 bool
*Stats
choiceNoMatch string
// recovery expression stack; keeps track of the currently available recovery expressions, which are traversed in reverse
recoveryStack []map[string]interface{}
// emptyState contains an empty storeDict, which is used to optimize cloneState if global "state" store is not used.
emptyState storeDict
}
// push a variable set on the vstack.
@ -7434,8 +7485,8 @@ func (p *parser) read() {
p.pt.col = 0
}
if rn == utf8.RuneError {
if n == 1 {
if rn == utf8.RuneError && n == 1 { // see utf8.DecodeRune
if !p.allowInvalidUTF8 {
p.addErr(errInvalidEncoding)
}
}
@ -7487,9 +7538,14 @@ func (p *parser) parse(g *grammar) (val interface{}, err error) {
}()
}
// start rule is rule [0]
startRule, ok := p.rules[p.entrypoint]
if !ok {
p.addErr(errInvalidEntrypoint)
return nil, p.errs.err()
}
p.read() // advance to first rune
val, ok := p.parseRule(g.rules[0])
val, ok = p.parseRule(startRule)
if !ok {
if len(*p.errs) == 0 {
// If parsing fails, but no errors have been recorded, the expected values
@ -7599,16 +7655,19 @@ func (p *parser) parseActionExpr(act *actionExpr) (interface{}, bool) {
if err != nil {
p.addErrAt(err, start.position, []string{})
}
val = actVal
}
return val, ok
}
func (p *parser) parseAndCodeExpr(and *andCodeExpr) (interface{}, bool) {
ok, err := and.run(p)
if err != nil {
p.addErr(err)
}
return nil, ok
}
@ -7618,18 +7677,20 @@ func (p *parser) parseAndExpr(and *andExpr) (interface{}, bool) {
_, ok := p.parseExpr(and.expr)
p.popV()
p.restore(pt)
return nil, ok
}
func (p *parser) parseAnyMatcher(any *anyMatcher) (interface{}, bool) {
if p.pt.rn != utf8.RuneError {
start := p.pt
p.read()
p.failAt(true, start.position, ".")
return p.sliceFrom(start), true
if p.pt.rn == utf8.RuneError && p.pt.w == 0 {
// EOF - see utf8.DecodeRune
p.failAt(false, p.pt.position, ".")
return nil, false
}
p.failAt(false, p.pt.position, ".")
return nil, false
start := p.pt
p.read()
p.failAt(true, start.position, ".")
return p.sliceFrom(start), true
}
func (p *parser) parseCharClassMatcher(chr *charClassMatcher) (interface{}, bool) {
@ -7637,7 +7698,7 @@ func (p *parser) parseCharClassMatcher(chr *charClassMatcher) (interface{}, bool
start := p.pt
// can't match EOF
if cur == utf8.RuneError {
if cur == utf8.RuneError && p.pt.w == 0 { // see utf8.DecodeRune
p.failAt(false, start.position, chr.val)
return nil, false
}
@ -7748,6 +7809,7 @@ func (p *parser) parseNotCodeExpr(not *notCodeExpr) (interface{}, bool) {
if err != nil {
p.addErr(err)
}
return nil, !ok
}
@ -7759,6 +7821,7 @@ func (p *parser) parseNotExpr(not *notExpr) (interface{}, bool) {
p.maxFailInvertExpected = !p.maxFailInvertExpected
p.popV()
p.restore(pt)
return nil, !ok
}

View File

@ -1,7 +1,7 @@
all: promql.go
promql.go: promql.peg gen.go ../bin/pigeon
PATH=../bin:${PATH} go generate -x ./...
promql.go: promql.peg gen.go ../../bin/pigeon
PATH=../../bin:${PATH} go generate -x ./...
clean:
rm -f promql.go

View File

@ -3618,6 +3618,10 @@ var (
// errNoRule is returned when the grammar to parse has no rule.
errNoRule = errors.New("grammar has no rule")
// errInvalidEntrypoint is returned when the specified entrypoint rule
// does not exist.
errInvalidEntrypoint = errors.New("invalid entrypoint")
// errInvalidEncoding is returned when the source is not properly
// utf8-encoded.
errInvalidEncoding = errors.New("invalid encoding")
@ -3644,6 +3648,24 @@ func MaxExpressions(maxExprCnt uint64) Option {
}
}
// Entrypoint creates an Option to set the rule name to use as the entrypoint.
// The rule name must have been specified with the -alternate-entrypoints option
// if generating the parser with the -optimize-grammar flag; otherwise
// it may have been optimized out. Passing an empty string sets the
// entrypoint to the first rule in the grammar.
//
// The default is to start parsing at the first rule in the grammar.
func Entrypoint(ruleName string) Option {
return func(p *parser) Option {
oldEntrypoint := p.entrypoint
p.entrypoint = ruleName
if ruleName == "" {
p.entrypoint = g.rules[0].name
}
return Entrypoint(oldEntrypoint)
}
}
// Statistics adds a user provided Stats struct to the parser to allow
// the user to process the results after the parsing has finished.
// Also the key for the "no match" counter is set.
@ -3701,6 +3723,20 @@ func Memoize(b bool) Option {
}
}
// AllowInvalidUTF8 creates an Option to allow invalid UTF-8 bytes.
// Every invalid UTF-8 byte is treated as a utf8.RuneError (U+FFFD)
// by character class matchers and is matched by the any matcher.
// The returned matched value, c.text and c.offset are NOT affected.
//
// The default is false.
func AllowInvalidUTF8(b bool) Option {
return func(p *parser) Option {
old := p.allowInvalidUTF8
p.allowInvalidUTF8 = b
return AllowInvalidUTF8(old)
}
}
// Recover creates an Option to set the recover flag to b. When set to
// true, this causes the parser to recover from panics and convert it
// to an error. Setting it to false can be useful while debugging to
@ -3725,6 +3761,16 @@ func GlobalStore(key string, value interface{}) Option {
}
}
// InitState creates an Option to set a key to a certain value in
// the global "state" store.
func InitState(key string, value interface{}) Option {
return func(p *parser) Option {
old := p.cur.state[key]
p.cur.state[key] = value
return InitState(key, old)
}
}
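// --- Illustrative sketch (not part of this commit): unlike globalStore, values
// seeded through InitState land in c.state and are rolled back together with
// the parser on backtracking. The key "depth" and the helper name are
// illustrative assumptions; Parse is the usual pigeon-generated entry point.
func parseWithState(src []byte) (interface{}, error) {
	return Parse("query.promql", src, InitState("depth", 0))
}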
// ParseFile parses the file identified by filename.
func ParseFile(filename string, opts ...Option) (i interface{}, err error) {
f, err := os.Open(filename)
@ -3777,10 +3823,21 @@ type current struct {
pos position // start position of the match
text []byte // raw text of the match
// the globalStore allows the parser to store arbitrary values
globalStore map[string]interface{}
// state is a store for arbitrary key,value pairs that the user wants to be
// tied to the backtracking of the parser.
// This is always rolled back if a parsing rule fails.
state storeDict
// globalStore is a general store for the user to store arbitrary key-value
// pairs that they need to manage and that they do not want tied to the
// backtracking of the parser. This is only modified by the user and never
// rolled back by the parser. It is always up to the user to keep this in a
// consistent state.
globalStore storeDict
}
type storeDict map[string]interface{}
// the AST types...
type grammar struct {
@ -3845,6 +3902,11 @@ type ruleRefExpr struct {
name string
}
type stateCodeExpr struct {
pos position
run func(*parser) error
}
type andCodeExpr struct {
pos position
run func(*parser) (bool, error)
@ -3947,11 +4009,15 @@ func newParser(filename string, b []byte, opts ...Option) *parser {
pt: savepoint{position: position{line: 1}},
recover: true,
cur: current{
globalStore: make(map[string]interface{}),
state: make(storeDict),
globalStore: make(storeDict),
},
maxFailPos: position{col: 1, line: 1},
maxFailExpected: make([]string, 0, 20),
Stats: &stats,
// start rule is rule [0] unless an alternate entrypoint is specified
entrypoint: g.rules[0].name,
emptyState: make(storeDict),
}
p.setOptions(opts)
@ -4030,12 +4096,19 @@ type parser struct {
// max number of expressions to be parsed
maxExprCnt uint64
// entrypoint for the parser
entrypoint string
allowInvalidUTF8 bool
*Stats
choiceNoMatch string
// recovery expression stack; keeps track of the currently available recovery expressions, which are traversed in reverse
recoveryStack []map[string]interface{}
// emptyState contains an empty storeDict, which is used to optimize cloneState if global "state" store is not used.
emptyState storeDict
}
// push a variable set on the vstack.
@ -4174,8 +4247,8 @@ func (p *parser) read() {
p.pt.col = 0
}
if rn == utf8.RuneError {
if n == 1 {
if rn == utf8.RuneError && n == 1 { // see utf8.DecodeRune
if !p.allowInvalidUTF8 {
p.addErr(errInvalidEncoding)
}
}
@ -4192,6 +4265,50 @@ func (p *parser) restore(pt savepoint) {
p.pt = pt
}
// Cloner is implemented by any value that has a Clone method, which returns a
// copy of the value. This is mainly used for types that are not passed by
// value (e.g. map, slice, chan) or structs that contain such types.
//
// This is used in conjunction with the global state feature to create proper
// copies of the state to allow the parser to properly restore the state in
// the case of backtracking.
type Cloner interface {
Clone() interface{}
}
// clone and return the parser's current state.
func (p *parser) cloneState() storeDict {
if p.debug {
defer p.out(p.in("cloneState"))
}
if len(p.cur.state) == 0 {
if len(p.emptyState) > 0 {
p.emptyState = make(storeDict)
}
return p.emptyState
}
state := make(storeDict, len(p.cur.state))
for k, v := range p.cur.state {
if c, ok := v.(Cloner); ok {
state[k] = c.Clone()
} else {
state[k] = v
}
}
return state
}
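// --- Illustrative sketch (not part of this commit): cloneState above only
// deep-copies state values that implement Cloner, so values with reference
// semantics should provide a Clone method. The tagSet type is an assumption
// for illustration only.
type tagSet map[string]struct{}

func (t tagSet) Clone() interface{} {
	c := make(tagSet, len(t))
	for k := range t {
		c[k] = struct{}{}
	}
	return c
}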
// restore the parser's current state to the given storeDict.
// Every restoreState should be applied only once per cloned state.
func (p *parser) restoreState(state storeDict) {
if p.debug {
defer p.out(p.in("restoreState"))
}
p.cur.state = state
}
// get the slice of bytes from the savepoint start to the current position.
func (p *parser) sliceFrom(start savepoint) []byte {
return p.data[start.position.offset:p.pt.position.offset]
@ -4257,9 +4374,14 @@ func (p *parser) parse(g *grammar) (val interface{}, err error) {
}()
}
// start rule is rule [0]
startRule, ok := p.rules[p.entrypoint]
if !ok {
p.addErr(errInvalidEntrypoint)
return nil, p.errs.err()
}
p.read() // advance to first rune
val, ok := p.parseRule(g.rules[0])
val, ok = p.parseRule(startRule)
if !ok {
if len(*p.errs) == 0 {
// If parsing fails, but no errors have been recorded, the expected values
@ -4377,6 +4499,8 @@ func (p *parser) parseExpr(expr interface{}) (interface{}, bool) {
val, ok = p.parseRuleRefExpr(expr)
case *seqExpr:
val, ok = p.parseSeqExpr(expr)
case *stateCodeExpr:
val, ok = p.parseStateCodeExpr(expr)
case *throwExpr:
val, ok = p.parseThrowExpr(expr)
case *zeroOrMoreExpr:
@ -4402,10 +4526,13 @@ func (p *parser) parseActionExpr(act *actionExpr) (interface{}, bool) {
if ok {
p.cur.pos = start.position
p.cur.text = p.sliceFrom(start)
state := p.cloneState()
actVal, err := act.run(p)
if err != nil {
p.addErrAt(err, start.position, []string{})
}
p.restoreState(state)
val = actVal
}
if ok && p.debug {
@ -4419,10 +4546,14 @@ func (p *parser) parseAndCodeExpr(and *andCodeExpr) (interface{}, bool) {
defer p.out(p.in("parseAndCodeExpr"))
}
state := p.cloneState()
ok, err := and.run(p)
if err != nil {
p.addErr(err)
}
p.restoreState(state)
return nil, ok
}
@ -4432,10 +4563,13 @@ func (p *parser) parseAndExpr(and *andExpr) (interface{}, bool) {
}
pt := p.pt
state := p.cloneState()
p.pushV()
_, ok := p.parseExpr(and.expr)
p.popV()
p.restoreState(state)
p.restore(pt)
return nil, ok
}
@ -4444,14 +4578,15 @@ func (p *parser) parseAnyMatcher(any *anyMatcher) (interface{}, bool) {
defer p.out(p.in("parseAnyMatcher"))
}
if p.pt.rn != utf8.RuneError {
start := p.pt
p.read()
p.failAt(true, start.position, ".")
return p.sliceFrom(start), true
if p.pt.rn == utf8.RuneError && p.pt.w == 0 {
// EOF - see utf8.DecodeRune
p.failAt(false, p.pt.position, ".")
return nil, false
}
p.failAt(false, p.pt.position, ".")
return nil, false
start := p.pt
p.read()
p.failAt(true, start.position, ".")
return p.sliceFrom(start), true
}
func (p *parser) parseCharClassMatcher(chr *charClassMatcher) (interface{}, bool) {
@ -4463,7 +4598,7 @@ func (p *parser) parseCharClassMatcher(chr *charClassMatcher) (interface{}, bool
start := p.pt
// can't match EOF
if cur == utf8.RuneError {
if cur == utf8.RuneError && p.pt.w == 0 { // see utf8.DecodeRune
p.failAt(false, start.position, chr.val)
return nil, false
}
@ -4544,6 +4679,8 @@ func (p *parser) parseChoiceExpr(ch *choiceExpr) (interface{}, bool) {
// dummy assignment to prevent compile error if optimized
_ = altI
state := p.cloneState()
p.pushV()
val, ok := p.parseExpr(alt)
p.popV()
@ -4551,6 +4688,7 @@ func (p *parser) parseChoiceExpr(ch *choiceExpr) (interface{}, bool) {
p.incChoiceAltCnt(ch, altI)
return val, ok
}
p.restoreState(state)
}
p.incChoiceAltCnt(ch, choiceNoMatch)
return nil, false
@ -4603,10 +4741,14 @@ func (p *parser) parseNotCodeExpr(not *notCodeExpr) (interface{}, bool) {
defer p.out(p.in("parseNotCodeExpr"))
}
state := p.cloneState()
ok, err := not.run(p)
if err != nil {
p.addErr(err)
}
p.restoreState(state)
return nil, !ok
}
@ -4616,12 +4758,15 @@ func (p *parser) parseNotExpr(not *notExpr) (interface{}, bool) {
}
pt := p.pt
state := p.cloneState()
p.pushV()
p.maxFailInvertExpected = !p.maxFailInvertExpected
_, ok := p.parseExpr(not.expr)
p.maxFailInvertExpected = !p.maxFailInvertExpected
p.popV()
p.restoreState(state)
p.restore(pt)
return nil, !ok
}
@ -4684,9 +4829,11 @@ func (p *parser) parseSeqExpr(seq *seqExpr) (interface{}, bool) {
vals := make([]interface{}, 0, len(seq.exprs))
pt := p.pt
state := p.cloneState()
for _, expr := range seq.exprs {
val, ok := p.parseExpr(expr)
if !ok {
p.restoreState(state)
p.restore(pt)
return nil, false
}
@ -4695,6 +4842,18 @@ func (p *parser) parseSeqExpr(seq *seqExpr) (interface{}, bool) {
return vals, true
}
func (p *parser) parseStateCodeExpr(state *stateCodeExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseStateCodeExpr"))
}
err := state.run(p)
if err != nil {
p.addErr(err)
}
return nil, true
}
func (p *parser) parseThrowExpr(expr *throwExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseThrowExpr"))

View File

@ -6,9 +6,9 @@ import (
"strings"
"time"
"github.com/influxdata/platform/query"
"github.com/influxdata/platform/query/ast"
"github.com/influxdata/platform/query/functions"
"github.com/influxdata/platform/query"
"github.com/influxdata/platform/query/semantic"
)

View File

@ -2,11 +2,12 @@ package main
import (
"fmt"
"golang.org/x/text/unicode/norm"
"io/ioutil"
"os"
"regexp"
"strings"
"golang.org/x/text/unicode/norm"
)
func normalizeString(s string) []byte {