unexport some functions

For good measure basically. Not needed for correctness, but it keeps
people from using these.
pull/4085/head
Daniel Morsing 2015-09-01 14:58:55 -07:00 committed by Daniel Morsing
parent 66fc270d1e
commit ab9ae468ca
4 changed files with 38 additions and 38 deletions
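
The rename relies on Go's visibility rule: an identifier that begins with an upper-case letter is exported and reachable from any importing package, while a lower-case initial letter confines it to its own package. Renaming InitializeMapFunc, InitializeReduceFunc, InitializeUnmarshaller, and the MapFunc/ReduceFunc/UnmarshalFunc/Iterator types to their lower-case forms therefore removes them from the tsdb package's public API without changing behavior. A minimal sketch of that rule, using hypothetical names unrelated to this repository:

package widgets

// Widget is an exported type; a lower-case "widget" would be package-private.
type Widget struct{}

// New is exported: it starts with an upper-case letter, so other packages
// can call widgets.New.
func New() *Widget { return &Widget{} }

// newInternal is unexported: only code inside package widgets can call it;
// widgets.newInternal in another package fails to compile with
// "cannot refer to unexported name".
func newInternal() *Widget { return &Widget{} }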

View File

@@ -386,9 +386,9 @@ func (e *SelectExecutor) executeAggregate(out chan *influxql.Row) {
 	// the offsets within the value slices that are returned by the
 	// mapper.
 	aggregates := e.stmt.FunctionCalls()
-	reduceFuncs := make([]ReduceFunc, len(aggregates))
+	reduceFuncs := make([]reduceFunc, len(aggregates))
 	for i, c := range aggregates {
-		reduceFunc, err := InitializeReduceFunc(c)
+		reduceFunc, err := initializeReduceFunc(c)
 		if err != nil {
 			out <- &influxql.Row{Err: err}
 			return

View File

@@ -4,7 +4,7 @@ package tsdb
 // Query functions are represented as two discreet functions: Map and Reduce. These roughly follow the MapReduce
 // paradigm popularized by Google and Hadoop.
 //
-// When adding an aggregate function, define a mapper, a reducer, and add them in the switch statement in the MapReduceFuncs function
+// When adding an aggregate function, define a mapper, a reducer, and add them in the switch statement in the MapreduceFuncs function

 import (
 	"encoding/json"
@@ -16,27 +16,27 @@ import (
 	"github.com/influxdb/influxdb/influxql"
 )

-// Iterator represents a forward-only iterator over a set of points.
-// These are used by the MapFunctions in this file
-type Iterator interface {
+// iterator represents a forward-only iterator over a set of points.
+// These are used by the mapFunctions in this file
+type iterator interface {
 	Next() (time int64, value interface{})
 	Tags() map[string]string
 	TMin() int64
 }

-// MapFunc represents a function used for mapping over a sequential series of data.
+// mapFunc represents a function used for mapping over a sequential series of data.
 // The iterator represents a single group by interval
-type MapFunc func(Iterator) interface{}
+type mapFunc func(iterator) interface{}

-// ReduceFunc represents a function used for reducing mapper output.
-type ReduceFunc func([]interface{}) interface{}
+// reduceFunc represents a function used for reducing mapper output.
+type reduceFunc func([]interface{}) interface{}

 // UnmarshalFunc represents a function that can take bytes from a mapper from remote
 // server and marshal it into an interface the reducer can use
-type UnmarshalFunc func([]byte) (interface{}, error)
+type unmarshalFunc func([]byte) (interface{}, error)

-// InitializeMapFunc takes an aggregate call from the query and returns the MapFunc
-func InitializeMapFunc(c *influxql.Call) (MapFunc, error) {
+// initializemapFunc takes an aggregate call from the query and returns the mapFunc
+func initializeMapFunc(c *influxql.Call) (mapFunc, error) {
 	// see if it's a query for raw data
 	if c == nil {
 		return MapRawQuery, nil
@@ -75,7 +75,7 @@ func InitializeMapFunc(c *influxql.Call) (MapFunc, error) {
 	case "last":
 		return MapLast, nil
 	case "top":
-		return func(itr Iterator) interface{} {
+		return func(itr iterator) interface{} {
 			return MapTop(itr, c)
 		}, nil
 	case "percentile":
@@ -84,7 +84,7 @@ func InitializeMapFunc(c *influxql.Call) (MapFunc, error) {
 		// If the arg is another aggregate e.g. derivative(mean(value)), then
 		// use the map func for that nested aggregate
 		if fn, ok := c.Args[0].(*influxql.Call); ok {
-			return InitializeMapFunc(fn)
+			return initializeMapFunc(fn)
 		}
 		return MapRawQuery, nil
 	default:
@@ -92,8 +92,8 @@ func InitializeMapFunc(c *influxql.Call) (MapFunc, error) {
 	}
 }

-// InitializeReduceFunc takes an aggregate call from the query and returns the ReduceFunc
-func InitializeReduceFunc(c *influxql.Call) (ReduceFunc, error) {
+// InitializereduceFunc takes an aggregate call from the query and returns the reduceFunc
+func initializeReduceFunc(c *influxql.Call) (reduceFunc, error) {
 	// Retrieve reduce function by name.
 	switch c.Name {
 	case "count":
@@ -138,7 +138,7 @@ func InitializeReduceFunc(c *influxql.Call) (ReduceFunc, error) {
 		// If the arg is another aggregate e.g. derivative(mean(value)), then
 		// use the map func for that nested aggregate
 		if fn, ok := c.Args[0].(*influxql.Call); ok {
-			return InitializeReduceFunc(fn)
+			return initializeReduceFunc(fn)
 		}
 		return nil, fmt.Errorf("expected function argument to %s", c.Name)
 	default:
@@ -146,7 +146,7 @@ func InitializeReduceFunc(c *influxql.Call) (ReduceFunc, error) {
 	}
 }

-func InitializeUnmarshaller(c *influxql.Call) (UnmarshalFunc, error) {
+func initializeUnmarshaller(c *influxql.Call) (unmarshalFunc, error) {
 	// if c is nil it's a raw data query
 	if c == nil {
 		return func(b []byte) (interface{}, error) {
@@ -210,7 +210,7 @@ func InitializeUnmarshaller(c *influxql.Call) (UnmarshalFunc, error) {
 }

 // MapCount computes the number of values in an iterator.
-func MapCount(itr Iterator) interface{} {
+func MapCount(itr iterator) interface{} {
 	n := float64(0)
 	for k, _ := itr.Next(); k != -1; k, _ = itr.Next() {
 		n++
@@ -376,7 +376,7 @@ func (d interfaceValues) Less(i, j int) bool {
 }

 // MapDistinct computes the unique values in an iterator.
-func MapDistinct(itr Iterator) interface{} {
+func MapDistinct(itr iterator) interface{} {
 	var index = make(map[interface{}]struct{})

 	for time, value := itr.Next(); time != -1; time, value = itr.Next() {
@@ -430,7 +430,7 @@ func ReduceDistinct(values []interface{}) interface{} {
 }

 // MapCountDistinct computes the unique count of values in an iterator.
-func MapCountDistinct(itr Iterator) interface{} {
+func MapCountDistinct(itr iterator) interface{} {
 	var index = make(map[interface{}]struct{})

 	for time, value := itr.Next(); time != -1; time, value = itr.Next() {
@@ -474,7 +474,7 @@ const (
 )

 // MapSum computes the summation of values in an iterator.
-func MapSum(itr Iterator) interface{} {
+func MapSum(itr iterator) interface{} {
 	n := float64(0)
 	count := 0
 	var resultType NumberType
@@ -529,7 +529,7 @@ func ReduceSum(values []interface{}) interface{} {
 }

 // MapMean computes the count and sum of values in an iterator to be combined by the reducer.
-func MapMean(itr Iterator) interface{} {
+func MapMean(itr iterator) interface{} {
 	out := &meanMapOutput{}

 	for k, v := itr.Next(); k != -1; k, v = itr.Next() {
@@ -735,7 +735,7 @@ type minMaxMapOut struct {
 }

 // MapMin collects the values to pass to the reducer
-func MapMin(itr Iterator) interface{} {
+func MapMin(itr iterator) interface{} {
 	min := &minMaxMapOut{}
 	pointsYielded := false

@@ -798,7 +798,7 @@ func ReduceMin(values []interface{}) interface{} {
 }

 // MapMax collects the values to pass to the reducer
-func MapMax(itr Iterator) interface{} {
+func MapMax(itr iterator) interface{} {
 	max := &minMaxMapOut{}
 	pointsYielded := false

@@ -866,7 +866,7 @@ type spreadMapOutput struct {
 }

 // MapSpread collects the values to pass to the reducer
-func MapSpread(itr Iterator) interface{} {
+func MapSpread(itr iterator) interface{} {
 	out := &spreadMapOutput{}
 	pointsYielded := false
 	var val float64
@@ -927,7 +927,7 @@ func ReduceSpread(values []interface{}) interface{} {
 }

 // MapStddev collects the values to pass to the reducer
-func MapStddev(itr Iterator) interface{} {
+func MapStddev(itr iterator) interface{} {
 	var values []float64

 	for k, v := itr.Next(); k != -1; k, v = itr.Next() {
@@ -985,7 +985,7 @@ type firstLastMapOutput struct {

 // MapFirst collects the values to pass to the reducer
 // This function assumes time ordered input
-func MapFirst(itr Iterator) interface{} {
+func MapFirst(itr iterator) interface{} {
 	k, v := itr.Next()
 	if k == -1 {
 		return nil
@@ -1030,7 +1030,7 @@ func ReduceFirst(values []interface{}) interface{} {
 }

 // MapLast collects the values to pass to the reducer
-func MapLast(itr Iterator) interface{} {
+func MapLast(itr iterator) interface{} {
 	out := &firstLastMapOutput{}
 	pointsYielded := false

@@ -1354,7 +1354,7 @@ func topCallArgs(c *influxql.Call) []string {
 }

 // MapTop emits the top data points for each group by interval
-func MapTop(itr Iterator, c *influxql.Call) interface{} {
+func MapTop(itr iterator, c *influxql.Call) interface{} {
 	// Capture the limit if it was specified in the call
 	lit, _ := c.Args[len(c.Args)-1].(*influxql.NumberLiteral)
 	limit := int64(lit.Val)
@@ -1493,7 +1493,7 @@ func ReduceTop(values []interface{}, c *influxql.Call) interface{} {
 }

 // MapEcho emits the data points for each group by interval
-func MapEcho(itr Iterator) interface{} {
+func MapEcho(itr iterator) interface{} {
 	var values []interface{}

 	for k, v := itr.Next(); k != -1; k, v = itr.Next() {
@@ -1549,7 +1549,7 @@ func IsNumeric(c *influxql.Call) bool {
 }

 // MapRawQuery is for queries without aggregates
-func MapRawQuery(itr Iterator) interface{} {
+func MapRawQuery(itr iterator) interface{} {
 	var values []*rawQueryMapOutput
 	for k, v := itr.Next(); k != -1; k, v = itr.Next() {
 		val := &rawQueryMapOutput{k, v}
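
The package comment in the first hunk above spells out the extension pattern for this file: an aggregate is a map function that folds one group-by interval from an iterator, plus a reduce function that combines the partial results from each mapper, both registered in the initializeMapFunc and initializeReduceFunc switch statements. A rough sketch of a hypothetical aggregate written against the unexported signatures in this diff (countPositive and its two helpers are invented names, not part of the commit):

// mapCountPositive is a hypothetical mapFunc (func(iterator) interface{}):
// it walks one group-by interval and returns a partial count of values > 0.
// The iterator interface is the one defined near the top of this file.
func mapCountPositive(itr iterator) interface{} {
	n := float64(0)
	for k, v := itr.Next(); k != -1; k, v = itr.Next() {
		if f, ok := v.(float64); ok && f > 0 {
			n++
		}
	}
	return n
}

// reduceCountPositive is the matching reduceFunc (func([]interface{}) interface{}):
// it sums the partial counts emitted by mapCountPositive on each mapper.
func reduceCountPositive(values []interface{}) interface{} {
	total := float64(0)
	for _, v := range values {
		if v == nil {
			continue
		}
		total += v.(float64)
	}
	return total
}

Both functions would also need cases in the initializeMapFunc and initializeReduceFunc switches before a query could reach them.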

View File

@@ -108,7 +108,7 @@ func TestInitializeMapFuncDerivative(t *testing.T) {
 		},
 	}

-	_, err := InitializeMapFunc(c)
+	_, err := initializeMapFunc(c)
 	if err != nil {
 		t.Errorf("InitializeMapFunc(%v) unexpected error. got %v", c, err)
 	}
@@ -122,7 +122,7 @@ func TestInitializeMapFuncDerivative(t *testing.T) {
 		},
 	}

-	_, err = InitializeMapFunc(c)
+	_, err = initializeMapFunc(c)
 	if err != nil {
 		t.Errorf("InitializeMapFunc(%v) unexpected error. got %v", c, err)
 	}

View File

@@ -63,7 +63,7 @@ type SelectMapper struct {
 	intervalSize int64 // Size of each interval.
 	numIntervals int   // Maximum number of intervals to return.
 	currInterval int   // Current interval for which data is being fetched.
-	mapFuncs     []MapFunc // The mapping functions.
+	mapFuncs     []mapFunc // The mapping functions.
 	fieldNames   []string  // the field name being read for mapping.
 }

@@ -500,10 +500,10 @@ func (lm *SelectMapper) initializeMapFunctions() error {
 	var err error
 	// Set up each mapping function for this statement.
 	aggregates := lm.selectStmt.FunctionCalls()
-	lm.mapFuncs = make([]MapFunc, len(aggregates))
+	lm.mapFuncs = make([]mapFunc, len(aggregates))
 	lm.fieldNames = make([]string, len(lm.mapFuncs))
 	for i, c := range aggregates {
-		lm.mapFuncs[i], err = InitializeMapFunc(c)
+		lm.mapFuncs[i], err = initializeMapFunc(c)
 		if err != nil {
 			return err
 		}