fix(query): Group By queries with offset that crosses a DST boundary can fail (#20230)
* fix(query): Group By queries with offset that crosses a DST boundary can fail

A customer reported that a GROUP BY query with an offset that caused an interval to cross a daylight saving time change inserted an extra output row, off by one hour. This fix ensures that the start time for the interval of a GROUP BY operator is set correctly before the time zone offset for that date and time is calculated.

Adds TestGroupByIterator_DST() in query/iterator_test.go for regression testing of this bug.

Fixes https://github.com/influxdata/influxdb/issues/20238
parent 5d922e9d0e
commit df39b1e71c
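For context on the failure mode: Europe/Rome, the zone used by the new regression test, switched from CET (+01:00) to CEST (+02:00) on 2020-03-29, so the zone offset of a window boundary differs depending on which side of that date it falls. A minimal standalone Go sketch, not part of this commit, that shows the offset change:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Europe/Rome is the location used by TestGroupByIterator_DST below.
	loc, err := time.LoadLocation("Europe/Rome")
	if err != nil {
		panic(err)
	}

	// Local midnight on the Mondays before and after the 2020-03-29 DST switch.
	before := time.Date(2020, time.March, 23, 0, 0, 0, 0, loc)
	after := time.Date(2020, time.March, 30, 0, 0, 0, 0, loc)

	_, beforeOffset := before.Zone() // 3600 seconds east of UTC (+01:00, CET)
	_, afterOffset := after.Zone()   // 7200 seconds east of UTC (+02:00, CEST)
	fmt.Println(beforeOffset, afterOffset)
}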
query/iterator.go

@@ -815,6 +815,8 @@ func (opt IteratorOptions) Window(t int64) (start, end int64) {
 		start = t - dt
 	}

+	start += int64(opt.Interval.Offset)
+
 	// Look for the start offset again because the first time may have been
 	// after the offset switch. Now that we are at midnight in UTC, we can
 	// lookup the zone offset again to get the real starting offset.
@@ -826,7 +828,6 @@ func (opt IteratorOptions) Window(t int64) (start, end int64) {
 			start += o
 		}
 	}
-	start += int64(opt.Interval.Offset)

 	// Find the end time.
 	if dt := int64(opt.Interval.Duration) - dt; influxql.MaxTime-dt <= t {
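Taken together, the two hunks move the start += int64(opt.Interval.Offset) adjustment so that it is applied before the second zone lookup (opt.Zone(start)), which is the ordering change the commit message describes. A hedged usage sketch, written as if inside query/iterator_test.go (it reuses that file's imports, the option values, and the mustParseTime helper from the regression test below); the expected start value is inferred from that test's expected output rather than verified independently:

	loc, _ := time.LoadLocation("Europe/Rome")
	opt := query.IteratorOptions{
		Interval: query.Interval{
			Duration: 7 * 24 * time.Hour, // weekly windows
			Offset:   4 * 24 * time.Hour, // shift the epoch-aligned (Thursday) boundary to Monday
		},
		Location: loc,
	}
	// A point shortly after the 2020-03-29 DST switch in Europe/Rome.
	start, _ := opt.Window(mustParseTime("2020-03-30T08:00:00Z").UnixNano())
	// With the corrected ordering, start should be local Monday midnight in CEST,
	// i.e. mustParseTime("2020-03-30T00:00:00+02:00").UnixNano().
	_ = start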
query/iterator_test.go

@@ -825,6 +825,61 @@ func TestFillIterator_ImplicitStartTime(t *testing.T) {
 	}
 }

+// A count() GROUP BY query with an offset that caused an interval
+// to cross a daylight savings change inserted an extra output row
+// off by one hour in a grouped count() expression.
+// https://github.com/influxdata/influxdb/issues/20238
+func TestGroupByIterator_DST(t *testing.T) {
+	inputIter := &IntegerIterator{
+		Points: []query.IntegerPoint{
+			{Name: "a", Tags: ParseTags("t=A"), Time: 1584345600000000000, Value: 1},
+			{Name: "a", Tags: ParseTags("t=A"), Time: 1584432000000000000, Value: 2},
+			{Name: "a", Tags: ParseTags("t=A"), Time: 1584518400000000000, Value: 3},
+			{Name: "a", Tags: ParseTags("t=A"), Time: 1585555200000000000, Value: 4},
+		},
+	}
+	const location = "Europe/Rome"
+	loc, err := time.LoadLocation(location)
+	if err != nil {
+		t.Fatalf("Cannot find timezone for %s: %s", location, err)
+	}
+	opt := query.IteratorOptions{
+		StartTime: mustParseTime("2020-03-15T00:00:00Z").UnixNano(),
+		EndTime:   mustParseTime("2020-04-01T00:00:00Z").UnixNano(),
+		Ascending: true,
+		Ordered:   true,
+		StripName: false,
+		Fill:      influxql.NullFill,
+		FillValue: nil,
+		Dedupe:    false,
+		Interval: query.Interval{
+			Duration: 7 * 24 * time.Hour,
+			Offset:   4 * 24 * time.Hour,
+		},
+		Expr:     MustParseExpr("count(Value)"),
+		Location: loc,
+	}
+
+	groupByIter, err := query.NewCallIterator(inputIter, opt)
+	if err != nil {
+		t.Fatalf("Cannot create Count and Group By iterator: %s", err)
+	} else {
+		groupByIter = query.NewFillIterator(groupByIter, MustParseExpr("count(Value)"), opt)
+	}
+
+	if a, err := (Iterators{groupByIter}).ReadAll(); err != nil {
+		t.Fatalf("unexpected error: %s", err)
+	} else if !deep.Equal(a, [][]query.Point{
+		{&query.IntegerPoint{Name: "a", Aggregated: 0, Time: mustParseTime("2020-03-09T00:00:00+01:00").UnixNano(), Value: 0}},
+		{&query.IntegerPoint{Name: "a", Aggregated: 3, Time: mustParseTime("2020-03-16T00:00:00+01:00").UnixNano(), Value: 3}},
+		{&query.IntegerPoint{Name: "a", Aggregated: 0, Time: mustParseTime("2020-03-23T00:00:00+01:00").UnixNano(), Value: 0}},
+		{&query.IntegerPoint{Name: "a", Aggregated: 1, Time: mustParseTime("2020-03-30T00:00:00+02:00").UnixNano(), Value: 1}},
+	}) {
+		t.Fatalf("unexpected points: %s", spew.Sdump(a))
+	}
+}
+
 func TestFillIterator_DST(t *testing.T) {
 	for _, tt := range []struct {
 		name string
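The expected points above also show the symptom arithmetic directly: the bucket boundaries fall on local Monday midnights in Europe/Rome, and the boundary after the switch (2020-03-30T00:00:00+02:00) is only 167 hours after the previous one (2020-03-23T00:00:00+01:00) in absolute time, not 168. A naive start-plus-Duration step would therefore land at 01:00 local time rather than midnight, consistent with the extra row off by one hour described in the commit message. An illustrative sketch using only the standard library, not influxdb code:

package main

import (
	"fmt"
	"time"
)

func main() {
	loc, err := time.LoadLocation("Europe/Rome")
	if err != nil {
		panic(err)
	}
	mon23 := time.Date(2020, time.March, 23, 0, 0, 0, 0, loc) // bucket start before the switch
	mon30 := time.Date(2020, time.March, 30, 0, 0, 0, 0, loc) // bucket start after the switch
	fmt.Println(mon30.Sub(mon23).Hours())      // 167, because clocks jumped forward on 2020-03-29
	fmt.Println(mon23.Add(7 * 24 * time.Hour)) // 2020-03-30 01:00:00 +0200 CEST, one hour past local midnight
}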