prometheus
3910 строк · 112.1 Кб
1// Copyright 2016 The Prometheus Authors
2// Licensed under the Apache License, Version 2.0 (the "License");
3// you may not use this file except in compliance with the License.
4// You may obtain a copy of the License at
5//
6// http://www.apache.org/licenses/LICENSE-2.0
7//
8// Unless required by applicable law or agreed to in writing, software
9// distributed under the License is distributed on an "AS IS" BASIS,
10// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11// See the License for the specific language governing permissions and
12// limitations under the License.
13
14package promql_test
15
16import (
17"context"
18"errors"
19"fmt"
20"math"
21"os"
22"sort"
23"strconv"
24"testing"
25"time"
26
27"github.com/stretchr/testify/require"
28"go.uber.org/goleak"
29
30"github.com/prometheus/prometheus/model/histogram"
31"github.com/prometheus/prometheus/model/labels"
32"github.com/prometheus/prometheus/model/timestamp"
33"github.com/prometheus/prometheus/promql"
34"github.com/prometheus/prometheus/promql/parser"
35"github.com/prometheus/prometheus/promql/parser/posrange"
36"github.com/prometheus/prometheus/promql/promqltest"
37"github.com/prometheus/prometheus/storage"
38"github.com/prometheus/prometheus/util/annotations"
39"github.com/prometheus/prometheus/util/stats"
40"github.com/prometheus/prometheus/util/teststorage"
41"github.com/prometheus/prometheus/util/testutil"
42)
43
44const (
45env = "query execution"
46defaultLookbackDelta = 5 * time.Minute
47defaultEpsilon = 0.000001 // Relative error allowed for sample values.
48)
49
50func TestMain(m *testing.M) {
51goleak.VerifyTestMain(m)
52}
53
54func TestQueryConcurrency(t *testing.T) {
55maxConcurrency := 10
56
57dir, err := os.MkdirTemp("", "test_concurrency")
58require.NoError(t, err)
59defer os.RemoveAll(dir)
60queryTracker := promql.NewActiveQueryTracker(dir, maxConcurrency, nil)
61t.Cleanup(queryTracker.Close)
62
63opts := promql.EngineOpts{
64Logger: nil,
65Reg: nil,
66MaxSamples: 10,
67Timeout: 100 * time.Second,
68ActiveQueryTracker: queryTracker,
69}
70
71engine := promql.NewEngine(opts)
72ctx, cancelCtx := context.WithCancel(context.Background())
73defer cancelCtx()
74
75block := make(chan struct{})
76processing := make(chan struct{})
77done := make(chan int)
78defer close(done)
79
80f := func(context.Context) error {
81select {
82case processing <- struct{}{}:
83case <-done:
84}
85
86select {
87case <-block:
88case <-done:
89}
90return nil
91}
92
93for i := 0; i < maxConcurrency; i++ {
94q := engine.NewTestQuery(f)
95go q.Exec(ctx)
96select {
97case <-processing:
98// Expected.
99case <-time.After(20 * time.Millisecond):
100require.Fail(t, "Query within concurrency threshold not being executed")
101}
102}
103
104q := engine.NewTestQuery(f)
105go q.Exec(ctx)
106
107select {
108case <-processing:
109require.Fail(t, "Query above concurrency threshold being executed")
110case <-time.After(20 * time.Millisecond):
111// Expected.
112}
113
114// Terminate a running query.
115block <- struct{}{}
116
117select {
118case <-processing:
119// Expected.
120case <-time.After(20 * time.Millisecond):
121require.Fail(t, "Query within concurrency threshold not being executed")
122}
123
124// Terminate remaining queries.
125for i := 0; i < maxConcurrency; i++ {
126block <- struct{}{}
127}
128}
129
130// contextDone returns an error if the context was canceled or timed out.
131func contextDone(ctx context.Context, env string) error {
132if err := ctx.Err(); err != nil {
133switch {
134case errors.Is(err, context.Canceled):
135return promql.ErrQueryCanceled(env)
136case errors.Is(err, context.DeadlineExceeded):
137return promql.ErrQueryTimeout(env)
138default:
139return err
140}
141}
142return nil
143}
144
145func TestQueryTimeout(t *testing.T) {
146opts := promql.EngineOpts{
147Logger: nil,
148Reg: nil,
149MaxSamples: 10,
150Timeout: 5 * time.Millisecond,
151}
152engine := promql.NewEngine(opts)
153ctx, cancelCtx := context.WithCancel(context.Background())
154defer cancelCtx()
155
156query := engine.NewTestQuery(func(ctx context.Context) error {
157time.Sleep(100 * time.Millisecond)
158return contextDone(ctx, "test statement execution")
159})
160
161res := query.Exec(ctx)
162require.Error(t, res.Err, "expected timeout error but got none")
163
164var e promql.ErrQueryTimeout
165require.ErrorAs(t, res.Err, &e, "expected timeout error but got: %s", res.Err)
166}
167
168const errQueryCanceled = promql.ErrQueryCanceled("test statement execution")
169
170func TestQueryCancel(t *testing.T) {
171opts := promql.EngineOpts{
172Logger: nil,
173Reg: nil,
174MaxSamples: 10,
175Timeout: 10 * time.Second,
176}
177engine := promql.NewEngine(opts)
178ctx, cancelCtx := context.WithCancel(context.Background())
179defer cancelCtx()
180
181// Cancel a running query before it completes.
182block := make(chan struct{})
183processing := make(chan struct{})
184
185query1 := engine.NewTestQuery(func(ctx context.Context) error {
186processing <- struct{}{}
187<-block
188return contextDone(ctx, "test statement execution")
189})
190
191var res *promql.Result
192
193go func() {
194res = query1.Exec(ctx)
195processing <- struct{}{}
196}()
197
198<-processing
199query1.Cancel()
200block <- struct{}{}
201<-processing
202
203require.Error(t, res.Err, "expected cancellation error for query1 but got none")
204require.Equal(t, errQueryCanceled, res.Err)
205
206// Canceling a query before starting it must have no effect.
207query2 := engine.NewTestQuery(func(ctx context.Context) error {
208return contextDone(ctx, "test statement execution")
209})
210
211query2.Cancel()
212res = query2.Exec(ctx)
213require.NoError(t, res.Err)
214}
215
216// errQuerier implements storage.Querier which always returns error.
217type errQuerier struct {
218err error
219}
220
221func (q *errQuerier) Select(context.Context, bool, *storage.SelectHints, ...*labels.Matcher) storage.SeriesSet {
222return errSeriesSet{err: q.err}
223}
224
225func (*errQuerier) LabelValues(context.Context, string, ...*labels.Matcher) ([]string, annotations.Annotations, error) {
226return nil, nil, nil
227}
228
229func (*errQuerier) LabelNames(context.Context, ...*labels.Matcher) ([]string, annotations.Annotations, error) {
230return nil, nil, nil
231}
232func (*errQuerier) Close() error { return nil }
233
234// errSeriesSet implements storage.SeriesSet which always returns error.
235type errSeriesSet struct {
236err error
237}
238
239func (errSeriesSet) Next() bool { return false }
240func (errSeriesSet) At() storage.Series { return nil }
241func (e errSeriesSet) Err() error { return e.err }
242func (e errSeriesSet) Warnings() annotations.Annotations { return nil }
243
244func TestQueryError(t *testing.T) {
245opts := promql.EngineOpts{
246Logger: nil,
247Reg: nil,
248MaxSamples: 10,
249Timeout: 10 * time.Second,
250}
251engine := promql.NewEngine(opts)
252errStorage := promql.ErrStorage{errors.New("storage error")}
253queryable := storage.QueryableFunc(func(mint, maxt int64) (storage.Querier, error) {
254return &errQuerier{err: errStorage}, nil
255})
256ctx, cancelCtx := context.WithCancel(context.Background())
257defer cancelCtx()
258
259vectorQuery, err := engine.NewInstantQuery(ctx, queryable, nil, "foo", time.Unix(1, 0))
260require.NoError(t, err)
261
262res := vectorQuery.Exec(ctx)
263require.Error(t, res.Err, "expected error on failed select but got none")
264require.ErrorIs(t, res.Err, errStorage, "expected error doesn't match")
265
266matrixQuery, err := engine.NewInstantQuery(ctx, queryable, nil, "foo[1m]", time.Unix(1, 0))
267require.NoError(t, err)
268
269res = matrixQuery.Exec(ctx)
270require.Error(t, res.Err, "expected error on failed select but got none")
271require.ErrorIs(t, res.Err, errStorage, "expected error doesn't match")
272}
273
274type noopHintRecordingQueryable struct {
275hints []*storage.SelectHints
276}
277
278func (h *noopHintRecordingQueryable) Querier(int64, int64) (storage.Querier, error) {
279return &hintRecordingQuerier{Querier: &errQuerier{}, h: h}, nil
280}
281
282type hintRecordingQuerier struct {
283storage.Querier
284
285h *noopHintRecordingQueryable
286}
287
288func (h *hintRecordingQuerier) Select(ctx context.Context, sortSeries bool, hints *storage.SelectHints, matchers ...*labels.Matcher) storage.SeriesSet {
289h.h.hints = append(h.h.hints, hints)
290return h.Querier.Select(ctx, sortSeries, hints, matchers...)
291}
292
293func TestSelectHintsSetCorrectly(t *testing.T) {
294opts := promql.EngineOpts{
295Logger: nil,
296Reg: nil,
297MaxSamples: 10,
298Timeout: 10 * time.Second,
299LookbackDelta: 5 * time.Second,
300EnableAtModifier: true,
301}
302
303for _, tc := range []struct {
304query string
305
306// All times are in milliseconds.
307start int64
308end int64
309
310// TODO(bwplotka): Add support for better hints when subquerying.
311expected []*storage.SelectHints
312}{
313{
314query: "foo", start: 10000,
315expected: []*storage.SelectHints{
316{Start: 5000, End: 10000},
317},
318}, {
319query: "foo @ 15", start: 10000,
320expected: []*storage.SelectHints{
321{Start: 10000, End: 15000},
322},
323}, {
324query: "foo @ 1", start: 10000,
325expected: []*storage.SelectHints{
326{Start: -4000, End: 1000},
327},
328}, {
329query: "foo[2m]", start: 200000,
330expected: []*storage.SelectHints{
331{Start: 80000, End: 200000, Range: 120000},
332},
333}, {
334query: "foo[2m] @ 180", start: 200000,
335expected: []*storage.SelectHints{
336{Start: 60000, End: 180000, Range: 120000},
337},
338}, {
339query: "foo[2m] @ 300", start: 200000,
340expected: []*storage.SelectHints{
341{Start: 180000, End: 300000, Range: 120000},
342},
343}, {
344query: "foo[2m] @ 60", start: 200000,
345expected: []*storage.SelectHints{
346{Start: -60000, End: 60000, Range: 120000},
347},
348}, {
349query: "foo[2m] offset 2m", start: 300000,
350expected: []*storage.SelectHints{
351{Start: 60000, End: 180000, Range: 120000},
352},
353}, {
354query: "foo[2m] @ 200 offset 2m", start: 300000,
355expected: []*storage.SelectHints{
356{Start: -40000, End: 80000, Range: 120000},
357},
358}, {
359query: "foo[2m:1s]", start: 300000,
360expected: []*storage.SelectHints{
361{Start: 175000, End: 300000, Step: 1000},
362},
363}, {
364query: "count_over_time(foo[2m:1s])", start: 300000,
365expected: []*storage.SelectHints{
366{Start: 175000, End: 300000, Func: "count_over_time", Step: 1000},
367},
368}, {
369query: "count_over_time(foo[2m:1s] @ 300)", start: 200000,
370expected: []*storage.SelectHints{
371{Start: 175000, End: 300000, Func: "count_over_time", Step: 1000},
372},
373}, {
374query: "count_over_time(foo[2m:1s] @ 200)", start: 200000,
375expected: []*storage.SelectHints{
376{Start: 75000, End: 200000, Func: "count_over_time", Step: 1000},
377},
378}, {
379query: "count_over_time(foo[2m:1s] @ 100)", start: 200000,
380expected: []*storage.SelectHints{
381{Start: -25000, End: 100000, Func: "count_over_time", Step: 1000},
382},
383}, {
384query: "count_over_time(foo[2m:1s] offset 10s)", start: 300000,
385expected: []*storage.SelectHints{
386{Start: 165000, End: 290000, Func: "count_over_time", Step: 1000},
387},
388}, {
389query: "count_over_time((foo offset 10s)[2m:1s] offset 10s)", start: 300000,
390expected: []*storage.SelectHints{
391{Start: 155000, End: 280000, Func: "count_over_time", Step: 1000},
392},
393}, {
394// When the @ is on the vector selector, the enclosing subquery parameters
395// don't affect the hint ranges.
396query: "count_over_time((foo @ 200 offset 10s)[2m:1s] offset 10s)", start: 300000,
397expected: []*storage.SelectHints{
398{Start: 185000, End: 190000, Func: "count_over_time", Step: 1000},
399},
400}, {
401// When the @ is on the vector selector, the enclosing subquery parameters
402// don't affect the hint ranges.
403query: "count_over_time((foo @ 200 offset 10s)[2m:1s] @ 100 offset 10s)", start: 300000,
404expected: []*storage.SelectHints{
405{Start: 185000, End: 190000, Func: "count_over_time", Step: 1000},
406},
407}, {
408query: "count_over_time((foo offset 10s)[2m:1s] @ 100 offset 10s)", start: 300000,
409expected: []*storage.SelectHints{
410{Start: -45000, End: 80000, Func: "count_over_time", Step: 1000},
411},
412}, {
413query: "foo", start: 10000, end: 20000,
414expected: []*storage.SelectHints{
415{Start: 5000, End: 20000, Step: 1000},
416},
417}, {
418query: "foo @ 15", start: 10000, end: 20000,
419expected: []*storage.SelectHints{
420{Start: 10000, End: 15000, Step: 1000},
421},
422}, {
423query: "foo @ 1", start: 10000, end: 20000,
424expected: []*storage.SelectHints{
425{Start: -4000, End: 1000, Step: 1000},
426},
427}, {
428query: "rate(foo[2m] @ 180)", start: 200000, end: 500000,
429expected: []*storage.SelectHints{
430{Start: 60000, End: 180000, Range: 120000, Func: "rate", Step: 1000},
431},
432}, {
433query: "rate(foo[2m] @ 300)", start: 200000, end: 500000,
434expected: []*storage.SelectHints{
435{Start: 180000, End: 300000, Range: 120000, Func: "rate", Step: 1000},
436},
437}, {
438query: "rate(foo[2m] @ 60)", start: 200000, end: 500000,
439expected: []*storage.SelectHints{
440{Start: -60000, End: 60000, Range: 120000, Func: "rate", Step: 1000},
441},
442}, {
443query: "rate(foo[2m])", start: 200000, end: 500000,
444expected: []*storage.SelectHints{
445{Start: 80000, End: 500000, Range: 120000, Func: "rate", Step: 1000},
446},
447}, {
448query: "rate(foo[2m] offset 2m)", start: 300000, end: 500000,
449expected: []*storage.SelectHints{
450{Start: 60000, End: 380000, Range: 120000, Func: "rate", Step: 1000},
451},
452}, {
453query: "rate(foo[2m:1s])", start: 300000, end: 500000,
454expected: []*storage.SelectHints{
455{Start: 175000, End: 500000, Func: "rate", Step: 1000},
456},
457}, {
458query: "count_over_time(foo[2m:1s])", start: 300000, end: 500000,
459expected: []*storage.SelectHints{
460{Start: 175000, End: 500000, Func: "count_over_time", Step: 1000},
461},
462}, {
463query: "count_over_time(foo[2m:1s] offset 10s)", start: 300000, end: 500000,
464expected: []*storage.SelectHints{
465{Start: 165000, End: 490000, Func: "count_over_time", Step: 1000},
466},
467}, {
468query: "count_over_time(foo[2m:1s] @ 300)", start: 200000, end: 500000,
469expected: []*storage.SelectHints{
470{Start: 175000, End: 300000, Func: "count_over_time", Step: 1000},
471},
472}, {
473query: "count_over_time(foo[2m:1s] @ 200)", start: 200000, end: 500000,
474expected: []*storage.SelectHints{
475{Start: 75000, End: 200000, Func: "count_over_time", Step: 1000},
476},
477}, {
478query: "count_over_time(foo[2m:1s] @ 100)", start: 200000, end: 500000,
479expected: []*storage.SelectHints{
480{Start: -25000, End: 100000, Func: "count_over_time", Step: 1000},
481},
482}, {
483query: "count_over_time((foo offset 10s)[2m:1s] offset 10s)", start: 300000, end: 500000,
484expected: []*storage.SelectHints{
485{Start: 155000, End: 480000, Func: "count_over_time", Step: 1000},
486},
487}, {
488// When the @ is on the vector selector, the enclosing subquery parameters
489// don't affect the hint ranges.
490query: "count_over_time((foo @ 200 offset 10s)[2m:1s] offset 10s)", start: 300000, end: 500000,
491expected: []*storage.SelectHints{
492{Start: 185000, End: 190000, Func: "count_over_time", Step: 1000},
493},
494}, {
495// When the @ is on the vector selector, the enclosing subquery parameters
496// don't affect the hint ranges.
497query: "count_over_time((foo @ 200 offset 10s)[2m:1s] @ 100 offset 10s)", start: 300000, end: 500000,
498expected: []*storage.SelectHints{
499{Start: 185000, End: 190000, Func: "count_over_time", Step: 1000},
500},
501}, {
502query: "count_over_time((foo offset 10s)[2m:1s] @ 100 offset 10s)", start: 300000, end: 500000,
503expected: []*storage.SelectHints{
504{Start: -45000, End: 80000, Func: "count_over_time", Step: 1000},
505},
506}, {
507query: "sum by (dim1) (foo)", start: 10000,
508expected: []*storage.SelectHints{
509{Start: 5000, End: 10000, Func: "sum", By: true, Grouping: []string{"dim1"}},
510},
511}, {
512query: "sum without (dim1) (foo)", start: 10000,
513expected: []*storage.SelectHints{
514{Start: 5000, End: 10000, Func: "sum", Grouping: []string{"dim1"}},
515},
516}, {
517query: "sum by (dim1) (avg_over_time(foo[1s]))", start: 10000,
518expected: []*storage.SelectHints{
519{Start: 9000, End: 10000, Func: "avg_over_time", Range: 1000},
520},
521}, {
522query: "sum by (dim1) (max by (dim2) (foo))", start: 10000,
523expected: []*storage.SelectHints{
524{Start: 5000, End: 10000, Func: "max", By: true, Grouping: []string{"dim2"}},
525},
526}, {
527query: "(max by (dim1) (foo))[5s:1s]", start: 10000,
528expected: []*storage.SelectHints{
529{Start: 0, End: 10000, Func: "max", By: true, Grouping: []string{"dim1"}, Step: 1000},
530},
531}, {
532query: "(sum(http_requests{group=~\"p.*\"})+max(http_requests{group=~\"c.*\"}))[20s:5s]", start: 120000,
533expected: []*storage.SelectHints{
534{Start: 95000, End: 120000, Func: "sum", By: true, Step: 5000},
535{Start: 95000, End: 120000, Func: "max", By: true, Step: 5000},
536},
537}, {
538query: "foo @ 50 + bar @ 250 + baz @ 900", start: 100000, end: 500000,
539expected: []*storage.SelectHints{
540{Start: 45000, End: 50000, Step: 1000},
541{Start: 245000, End: 250000, Step: 1000},
542{Start: 895000, End: 900000, Step: 1000},
543},
544}, {
545query: "foo @ 50 + bar + baz @ 900", start: 100000, end: 500000,
546expected: []*storage.SelectHints{
547{Start: 45000, End: 50000, Step: 1000},
548{Start: 95000, End: 500000, Step: 1000},
549{Start: 895000, End: 900000, Step: 1000},
550},
551}, {
552query: "rate(foo[2s] @ 50) + bar @ 250 + baz @ 900", start: 100000, end: 500000,
553expected: []*storage.SelectHints{
554{Start: 48000, End: 50000, Step: 1000, Func: "rate", Range: 2000},
555{Start: 245000, End: 250000, Step: 1000},
556{Start: 895000, End: 900000, Step: 1000},
557},
558}, {
559query: "rate(foo[2s:1s] @ 50) + bar + baz", start: 100000, end: 500000,
560expected: []*storage.SelectHints{
561{Start: 43000, End: 50000, Step: 1000, Func: "rate"},
562{Start: 95000, End: 500000, Step: 1000},
563{Start: 95000, End: 500000, Step: 1000},
564},
565}, {
566query: "rate(foo[2s:1s] @ 50) + bar + rate(baz[2m:1s] @ 900 offset 2m) ", start: 100000, end: 500000,
567expected: []*storage.SelectHints{
568{Start: 43000, End: 50000, Step: 1000, Func: "rate"},
569{Start: 95000, End: 500000, Step: 1000},
570{Start: 655000, End: 780000, Step: 1000, Func: "rate"},
571},
572}, { // Hints are based on the inner most subquery timestamp.
573query: `sum_over_time(sum_over_time(metric{job="1"}[100s])[100s:25s] @ 50)[3s:1s] @ 3000`, start: 100000,
574expected: []*storage.SelectHints{
575{Start: -150000, End: 50000, Range: 100000, Func: "sum_over_time", Step: 25000},
576},
577}, { // Hints are based on the inner most subquery timestamp.
578query: `sum_over_time(sum_over_time(metric{job="1"}[100s])[100s:25s] @ 3000)[3s:1s] @ 50`,
579expected: []*storage.SelectHints{
580{Start: 2800000, End: 3000000, Range: 100000, Func: "sum_over_time", Step: 25000},
581},
582},
583} {
584t.Run(tc.query, func(t *testing.T) {
585engine := promql.NewEngine(opts)
586hintsRecorder := &noopHintRecordingQueryable{}
587
588var (
589query promql.Query
590err error
591)
592ctx := context.Background()
593
594if tc.end == 0 {
595query, err = engine.NewInstantQuery(ctx, hintsRecorder, nil, tc.query, timestamp.Time(tc.start))
596} else {
597query, err = engine.NewRangeQuery(ctx, hintsRecorder, nil, tc.query, timestamp.Time(tc.start), timestamp.Time(tc.end), time.Second)
598}
599require.NoError(t, err)
600
601res := query.Exec(context.Background())
602require.NoError(t, res.Err)
603
604require.Equal(t, tc.expected, hintsRecorder.hints)
605})
606}
607}
608
609func TestEngineShutdown(t *testing.T) {
610opts := promql.EngineOpts{
611Logger: nil,
612Reg: nil,
613MaxSamples: 10,
614Timeout: 10 * time.Second,
615}
616engine := promql.NewEngine(opts)
617ctx, cancelCtx := context.WithCancel(context.Background())
618
619block := make(chan struct{})
620processing := make(chan struct{})
621
622// Shutdown engine on first handler execution. Should handler execution ever become
623// concurrent this test has to be adjusted accordingly.
624f := func(ctx context.Context) error {
625processing <- struct{}{}
626<-block
627return contextDone(ctx, "test statement execution")
628}
629query1 := engine.NewTestQuery(f)
630
631// Stopping the engine must cancel the base context. While executing queries is
632// still possible, their context is canceled from the beginning and execution should
633// terminate immediately.
634
635var res *promql.Result
636go func() {
637res = query1.Exec(ctx)
638processing <- struct{}{}
639}()
640
641<-processing
642cancelCtx()
643block <- struct{}{}
644<-processing
645
646require.Error(t, res.Err, "expected error on shutdown during query but got none")
647require.Equal(t, errQueryCanceled, res.Err)
648
649query2 := engine.NewTestQuery(func(context.Context) error {
650require.FailNow(t, "reached query execution unexpectedly")
651return nil
652})
653
654// The second query is started after the engine shut down. It must
655// be canceled immediately.
656res2 := query2.Exec(ctx)
657require.Error(t, res2.Err, "expected error on querying with canceled context but got none")
658
659var e promql.ErrQueryCanceled
660require.ErrorAs(t, res2.Err, &e, "expected cancellation error but got: %s", res2.Err)
661}
662
663func TestEngineEvalStmtTimestamps(t *testing.T) {
664storage := promqltest.LoadedStorage(t, `
665load 10s
666metric 1 2
667`)
668t.Cleanup(func() { storage.Close() })
669
670cases := []struct {
671Query string
672Result parser.Value
673Start time.Time
674End time.Time
675Interval time.Duration
676ShouldError bool
677}{
678// Instant queries.
679{
680Query: "1",
681Result: promql.Scalar{V: 1, T: 1000},
682Start: time.Unix(1, 0),
683},
684{
685Query: "metric",
686Result: promql.Vector{
687promql.Sample{
688F: 1,
689T: 1000,
690Metric: labels.FromStrings("__name__", "metric"),
691},
692},
693Start: time.Unix(1, 0),
694},
695{
696Query: "metric[20s]",
697Result: promql.Matrix{
698promql.Series{
699Floats: []promql.FPoint{{F: 1, T: 0}, {F: 2, T: 10000}},
700Metric: labels.FromStrings("__name__", "metric"),
701},
702},
703Start: time.Unix(10, 0),
704},
705// Range queries.
706{
707Query: "1",
708Result: promql.Matrix{
709promql.Series{
710Floats: []promql.FPoint{{F: 1, T: 0}, {F: 1, T: 1000}, {F: 1, T: 2000}},
711Metric: labels.EmptyLabels(),
712},
713},
714Start: time.Unix(0, 0),
715End: time.Unix(2, 0),
716Interval: time.Second,
717},
718{
719Query: "metric",
720Result: promql.Matrix{
721promql.Series{
722Floats: []promql.FPoint{{F: 1, T: 0}, {F: 1, T: 1000}, {F: 1, T: 2000}},
723Metric: labels.FromStrings("__name__", "metric"),
724},
725},
726Start: time.Unix(0, 0),
727End: time.Unix(2, 0),
728Interval: time.Second,
729},
730{
731Query: "metric",
732Result: promql.Matrix{
733promql.Series{
734Floats: []promql.FPoint{{F: 1, T: 0}, {F: 1, T: 5000}, {F: 2, T: 10000}},
735Metric: labels.FromStrings("__name__", "metric"),
736},
737},
738Start: time.Unix(0, 0),
739End: time.Unix(10, 0),
740Interval: 5 * time.Second,
741},
742{
743Query: `count_values("wrong label!", metric)`,
744ShouldError: true,
745},
746}
747
748for i, c := range cases {
749t.Run(fmt.Sprintf("%d query=%s", i, c.Query), func(t *testing.T) {
750var err error
751var qry promql.Query
752engine := newTestEngine()
753if c.Interval == 0 {
754qry, err = engine.NewInstantQuery(context.Background(), storage, nil, c.Query, c.Start)
755} else {
756qry, err = engine.NewRangeQuery(context.Background(), storage, nil, c.Query, c.Start, c.End, c.Interval)
757}
758require.NoError(t, err)
759
760res := qry.Exec(context.Background())
761if c.ShouldError {
762require.Error(t, res.Err, "expected error for the query %q", c.Query)
763return
764}
765
766require.NoError(t, res.Err)
767require.Equal(t, c.Result, res.Value, "query %q failed", c.Query)
768})
769}
770}
771
772func TestQueryStatistics(t *testing.T) {
773storage := promqltest.LoadedStorage(t, `
774load 10s
775metricWith1SampleEvery10Seconds 1+1x100
776metricWith3SampleEvery10Seconds{a="1",b="1"} 1+1x100
777metricWith3SampleEvery10Seconds{a="2",b="2"} 1+1x100
778metricWith3SampleEvery10Seconds{a="3",b="2"} 1+1x100
779metricWith1HistogramEvery10Seconds {{schema:1 count:5 sum:20 buckets:[1 2 1 1]}}+{{schema:1 count:10 sum:5 buckets:[1 2 3 4]}}x100
780`)
781t.Cleanup(func() { storage.Close() })
782
783cases := []struct {
784Query string
785SkipMaxCheck bool
786TotalSamples int64
787TotalSamplesPerStep stats.TotalSamplesPerStep
788PeakSamples int
789Start time.Time
790End time.Time
791Interval time.Duration
792}{
793{
794Query: `"literal string"`,
795SkipMaxCheck: true, // This can't fail from a max samples limit.
796Start: time.Unix(21, 0),
797TotalSamples: 0,
798TotalSamplesPerStep: stats.TotalSamplesPerStep{
79921000: 0,
800},
801},
802{
803Query: "1",
804Start: time.Unix(21, 0),
805TotalSamples: 0,
806PeakSamples: 1,
807TotalSamplesPerStep: stats.TotalSamplesPerStep{
80821000: 0,
809},
810},
811{
812Query: "metricWith1SampleEvery10Seconds",
813Start: time.Unix(21, 0),
814PeakSamples: 1,
815TotalSamples: 1, // 1 sample / 10 seconds
816TotalSamplesPerStep: stats.TotalSamplesPerStep{
81721000: 1,
818},
819},
820{
821Query: "metricWith1HistogramEvery10Seconds",
822Start: time.Unix(21, 0),
823PeakSamples: 12,
824TotalSamples: 12, // 1 histogram sample of size 12 / 10 seconds
825TotalSamplesPerStep: stats.TotalSamplesPerStep{
82621000: 12,
827},
828},
829{
830// timestamp function has a special handling.
831Query: "timestamp(metricWith1SampleEvery10Seconds)",
832Start: time.Unix(21, 0),
833PeakSamples: 2,
834TotalSamples: 1, // 1 sample / 10 seconds
835TotalSamplesPerStep: stats.TotalSamplesPerStep{
83621000: 1,
837},
838},
839{
840Query: "timestamp(metricWith1HistogramEvery10Seconds)",
841Start: time.Unix(21, 0),
842PeakSamples: 2,
843TotalSamples: 1, // 1 float sample (because of timestamp) / 10 seconds
844TotalSamplesPerStep: stats.TotalSamplesPerStep{
84521000: 1,
846},
847},
848{
849Query: "metricWith1SampleEvery10Seconds",
850Start: time.Unix(22, 0),
851PeakSamples: 1,
852TotalSamples: 1, // 1 sample / 10 seconds
853TotalSamplesPerStep: stats.TotalSamplesPerStep{
85422000: 1, // Aligned to the step time, not the sample time.
855},
856},
857{
858Query: "metricWith1SampleEvery10Seconds offset 10s",
859Start: time.Unix(21, 0),
860PeakSamples: 1,
861TotalSamples: 1, // 1 sample / 10 seconds
862TotalSamplesPerStep: stats.TotalSamplesPerStep{
86321000: 1,
864},
865},
866{
867Query: "metricWith1SampleEvery10Seconds @ 15",
868Start: time.Unix(21, 0),
869PeakSamples: 1,
870TotalSamples: 1, // 1 sample / 10 seconds
871TotalSamplesPerStep: stats.TotalSamplesPerStep{
87221000: 1,
873},
874},
875{
876Query: `metricWith3SampleEvery10Seconds{a="1"}`,
877Start: time.Unix(21, 0),
878PeakSamples: 1,
879TotalSamples: 1, // 1 sample / 10 seconds
880TotalSamplesPerStep: stats.TotalSamplesPerStep{
88121000: 1,
882},
883},
884{
885Query: `metricWith3SampleEvery10Seconds{a="1"} @ 19`,
886Start: time.Unix(21, 0),
887PeakSamples: 1,
888TotalSamples: 1, // 1 sample / 10 seconds
889TotalSamplesPerStep: stats.TotalSamplesPerStep{
89021000: 1,
891},
892},
893{
894Query: `metricWith3SampleEvery10Seconds{a="1"}[20s] @ 19`,
895Start: time.Unix(21, 0),
896PeakSamples: 2,
897TotalSamples: 2, // (1 sample / 10 seconds) * 20s
898TotalSamplesPerStep: stats.TotalSamplesPerStep{
89921000: 2,
900},
901},
902{
903Query: "metricWith3SampleEvery10Seconds",
904Start: time.Unix(21, 0),
905PeakSamples: 3,
906TotalSamples: 3, // 3 samples / 10 seconds
907TotalSamplesPerStep: stats.TotalSamplesPerStep{
90821000: 3,
909},
910},
911{
912Query: "metricWith1SampleEvery10Seconds[60s]",
913Start: time.Unix(201, 0),
914PeakSamples: 6,
915TotalSamples: 6, // 1 sample / 10 seconds * 60 seconds
916TotalSamplesPerStep: stats.TotalSamplesPerStep{
917201000: 6,
918},
919},
920{
921Query: "metricWith1HistogramEvery10Seconds[60s]",
922Start: time.Unix(201, 0),
923PeakSamples: 72,
924TotalSamples: 72, // 1 histogram (size 12) / 10 seconds * 60 seconds
925TotalSamplesPerStep: stats.TotalSamplesPerStep{
926201000: 72,
927},
928},
929{
930Query: "max_over_time(metricWith1SampleEvery10Seconds[59s])[20s:5s]",
931Start: time.Unix(201, 0),
932PeakSamples: 10,
933TotalSamples: 24, // (1 sample / 10 seconds * 60 seconds) * 20/5 (using 59s so we always return 6 samples
934// as if we run a query on 00 looking back 60 seconds we will return 7 samples;
935// see next test).
936TotalSamplesPerStep: stats.TotalSamplesPerStep{
937201000: 24,
938},
939},
940{
941Query: "max_over_time(metricWith1SampleEvery10Seconds[60s])[20s:5s]",
942Start: time.Unix(201, 0),
943PeakSamples: 11,
944TotalSamples: 26, // (1 sample / 10 seconds * 60 seconds) * 4 + 2 as
945// max_over_time(metricWith1SampleEvery10Seconds[60s]) @ 190 and 200 will return 7 samples.
946TotalSamplesPerStep: stats.TotalSamplesPerStep{
947201000: 26,
948},
949},
950{
951Query: "max_over_time(metricWith1HistogramEvery10Seconds[60s])[20s:5s]",
952Start: time.Unix(201, 0),
953PeakSamples: 72,
954TotalSamples: 312, // (1 histogram (size 12) / 10 seconds * 60 seconds) * 4 + 2 * 12 as
955// max_over_time(metricWith1SampleEvery10Seconds[60s]) @ 190 and 200 will return 7 samples.
956TotalSamplesPerStep: stats.TotalSamplesPerStep{
957201000: 312,
958},
959},
960{
961Query: "metricWith1SampleEvery10Seconds[60s] @ 30",
962Start: time.Unix(201, 0),
963PeakSamples: 4,
964TotalSamples: 4, // @ modifier force the evaluation to at 30 seconds - So it brings 4 datapoints (0, 10, 20, 30 seconds) * 1 series
965TotalSamplesPerStep: stats.TotalSamplesPerStep{
966201000: 4,
967},
968},
969{
970Query: "metricWith1HistogramEvery10Seconds[60s] @ 30",
971Start: time.Unix(201, 0),
972PeakSamples: 48,
973TotalSamples: 48, // @ modifier force the evaluation to at 30 seconds - So it brings 4 datapoints (0, 10, 20, 30 seconds) * 1 series
974TotalSamplesPerStep: stats.TotalSamplesPerStep{
975201000: 48,
976},
977},
978{
979Query: "sum(max_over_time(metricWith3SampleEvery10Seconds[60s] @ 30))",
980Start: time.Unix(201, 0),
981PeakSamples: 7,
982TotalSamples: 12, // @ modifier force the evaluation to at 30 seconds - So it brings 4 datapoints (0, 10, 20, 30 seconds) * 3 series
983TotalSamplesPerStep: stats.TotalSamplesPerStep{
984201000: 12,
985},
986},
987{
988Query: "sum by (b) (max_over_time(metricWith3SampleEvery10Seconds[60s] @ 30))",
989Start: time.Unix(201, 0),
990PeakSamples: 7,
991TotalSamples: 12, // @ modifier force the evaluation to at 30 seconds - So it brings 4 datapoints (0, 10, 20, 30 seconds) * 3 series
992TotalSamplesPerStep: stats.TotalSamplesPerStep{
993201000: 12,
994},
995},
996{
997Query: "metricWith1SampleEvery10Seconds[60s] offset 10s",
998Start: time.Unix(201, 0),
999PeakSamples: 6,
1000TotalSamples: 6, // 1 sample / 10 seconds * 60 seconds
1001TotalSamplesPerStep: stats.TotalSamplesPerStep{
1002201000: 6,
1003},
1004},
1005{
1006Query: "metricWith3SampleEvery10Seconds[60s]",
1007Start: time.Unix(201, 0),
1008PeakSamples: 18,
1009TotalSamples: 18, // 3 sample / 10 seconds * 60 seconds
1010TotalSamplesPerStep: stats.TotalSamplesPerStep{
1011201000: 18,
1012},
1013},
1014{
1015Query: "max_over_time(metricWith1SampleEvery10Seconds[60s])",
1016Start: time.Unix(201, 0),
1017PeakSamples: 7,
1018TotalSamples: 6, // 1 sample / 10 seconds * 60 seconds
1019TotalSamplesPerStep: stats.TotalSamplesPerStep{
1020201000: 6,
1021},
1022},
1023{
1024Query: "absent_over_time(metricWith1SampleEvery10Seconds[60s])",
1025Start: time.Unix(201, 0),
1026PeakSamples: 7,
1027TotalSamples: 6, // 1 sample / 10 seconds * 60 seconds
1028TotalSamplesPerStep: stats.TotalSamplesPerStep{
1029201000: 6,
1030},
1031},
1032{
1033Query: "max_over_time(metricWith3SampleEvery10Seconds[60s])",
1034Start: time.Unix(201, 0),
1035PeakSamples: 9,
1036TotalSamples: 18, // 3 sample / 10 seconds * 60 seconds
1037TotalSamplesPerStep: stats.TotalSamplesPerStep{
1038201000: 18,
1039},
1040},
1041{
1042Query: "metricWith1SampleEvery10Seconds[60s:5s]",
1043Start: time.Unix(201, 0),
1044PeakSamples: 12,
1045TotalSamples: 12, // 1 sample per query * 12 queries (60/5)
1046TotalSamplesPerStep: stats.TotalSamplesPerStep{
1047201000: 12,
1048},
1049},
1050{
1051Query: "metricWith1SampleEvery10Seconds[60s:5s] offset 10s",
1052Start: time.Unix(201, 0),
1053PeakSamples: 12,
1054TotalSamples: 12, // 1 sample per query * 12 queries (60/5)
1055TotalSamplesPerStep: stats.TotalSamplesPerStep{
1056201000: 12,
1057},
1058},
1059{
1060Query: "max_over_time(metricWith3SampleEvery10Seconds[60s:5s])",
1061Start: time.Unix(201, 0),
1062PeakSamples: 51,
1063TotalSamples: 36, // 3 sample per query * 12 queries (60/5)
1064TotalSamplesPerStep: stats.TotalSamplesPerStep{
1065201000: 36,
1066},
1067},
1068{
1069Query: "sum(max_over_time(metricWith3SampleEvery10Seconds[60s:5s])) + sum(max_over_time(metricWith3SampleEvery10Seconds[60s:5s]))",
1070Start: time.Unix(201, 0),
1071PeakSamples: 52,
1072TotalSamples: 72, // 2 * (3 sample per query * 12 queries (60/5))
1073TotalSamplesPerStep: stats.TotalSamplesPerStep{
1074201000: 72,
1075},
1076},
1077{
1078Query: `metricWith3SampleEvery10Seconds{a="1"}`,
1079Start: time.Unix(201, 0),
1080End: time.Unix(220, 0),
1081Interval: 5 * time.Second,
1082PeakSamples: 4,
1083TotalSamples: 4, // 1 sample per query * 4 steps
1084TotalSamplesPerStep: stats.TotalSamplesPerStep{
1085201000: 1,
1086206000: 1,
1087211000: 1,
1088216000: 1,
1089},
1090},
1091{
1092Query: `metricWith3SampleEvery10Seconds{a="1"}`,
1093Start: time.Unix(204, 0),
1094End: time.Unix(223, 0),
1095Interval: 5 * time.Second,
1096PeakSamples: 4,
1097TotalSamples: 4, // 1 sample per query * 4 steps
1098TotalSamplesPerStep: stats.TotalSamplesPerStep{
1099204000: 1, // aligned to the step time, not the sample time
1100209000: 1,
1101214000: 1,
1102219000: 1,
1103},
1104},
1105{
1106Query: `metricWith1HistogramEvery10Seconds`,
1107Start: time.Unix(204, 0),
1108End: time.Unix(223, 0),
1109Interval: 5 * time.Second,
1110PeakSamples: 48,
1111TotalSamples: 48, // 1 histogram (size 12) per query * 4 steps
1112TotalSamplesPerStep: stats.TotalSamplesPerStep{
1113204000: 12, // aligned to the step time, not the sample time
1114209000: 12,
1115214000: 12,
1116219000: 12,
1117},
1118},
1119{
1120// timestamp function has a special handling
1121Query: "timestamp(metricWith1SampleEvery10Seconds)",
1122Start: time.Unix(201, 0),
1123End: time.Unix(220, 0),
1124Interval: 5 * time.Second,
1125PeakSamples: 5,
1126TotalSamples: 4, // 1 sample per query * 4 steps
1127TotalSamplesPerStep: stats.TotalSamplesPerStep{
1128201000: 1,
1129206000: 1,
1130211000: 1,
1131216000: 1,
1132},
1133},
1134{
1135// timestamp function has a special handling
1136Query: "timestamp(metricWith1HistogramEvery10Seconds)",
1137Start: time.Unix(201, 0),
1138End: time.Unix(220, 0),
1139Interval: 5 * time.Second,
1140PeakSamples: 5,
1141TotalSamples: 4, // 1 sample per query * 4 steps
1142TotalSamplesPerStep: stats.TotalSamplesPerStep{
1143201000: 1,
1144206000: 1,
1145211000: 1,
1146216000: 1,
1147},
1148},
1149{
1150Query: `max_over_time(metricWith3SampleEvery10Seconds{a="1"}[10s])`,
1151Start: time.Unix(991, 0),
1152End: time.Unix(1021, 0),
1153Interval: 10 * time.Second,
1154PeakSamples: 2,
1155TotalSamples: 2, // 1 sample per query * 2 steps with data
1156TotalSamplesPerStep: stats.TotalSamplesPerStep{
1157991000: 1,
11581001000: 1,
11591011000: 0,
11601021000: 0,
1161},
1162},
1163{
1164Query: `metricWith3SampleEvery10Seconds{a="1"} offset 10s`,
1165Start: time.Unix(201, 0),
1166End: time.Unix(220, 0),
1167Interval: 5 * time.Second,
1168PeakSamples: 4,
1169TotalSamples: 4, // 1 sample per query * 4 steps
1170TotalSamplesPerStep: stats.TotalSamplesPerStep{
1171201000: 1,
1172206000: 1,
1173211000: 1,
1174216000: 1,
1175},
1176},
1177{
1178Query: "max_over_time(metricWith3SampleEvery10Seconds[60s] @ 30)",
1179Start: time.Unix(201, 0),
1180End: time.Unix(220, 0),
1181Interval: 5 * time.Second,
1182PeakSamples: 12,
1183TotalSamples: 48, // @ modifier force the evaluation timestamp at 30 seconds - So it brings 4 datapoints (0, 10, 20, 30 seconds) * 3 series * 4 steps
1184TotalSamplesPerStep: stats.TotalSamplesPerStep{
1185201000: 12,
1186206000: 12,
1187211000: 12,
1188216000: 12,
1189},
1190},
1191{
1192Query: `metricWith3SampleEvery10Seconds`,
1193Start: time.Unix(201, 0),
1194End: time.Unix(220, 0),
1195PeakSamples: 12,
1196Interval: 5 * time.Second,
1197TotalSamples: 12, // 3 sample per query * 4 steps
1198TotalSamplesPerStep: stats.TotalSamplesPerStep{
1199201000: 3,
1200206000: 3,
1201211000: 3,
1202216000: 3,
1203},
1204},
1205{
1206Query: `max_over_time(metricWith3SampleEvery10Seconds[60s])`,
1207Start: time.Unix(201, 0),
1208End: time.Unix(220, 0),
1209Interval: 5 * time.Second,
1210PeakSamples: 18,
1211TotalSamples: 72, // (3 sample / 10 seconds * 60 seconds) * 4 steps = 72
1212TotalSamplesPerStep: stats.TotalSamplesPerStep{
1213201000: 18,
1214206000: 18,
1215211000: 18,
1216216000: 18,
1217},
1218},
1219{
1220Query: "max_over_time(metricWith3SampleEvery10Seconds[60s:5s])",
1221Start: time.Unix(201, 0),
1222End: time.Unix(220, 0),
1223Interval: 5 * time.Second,
1224PeakSamples: 72,
1225TotalSamples: 144, // 3 sample per query * 12 queries (60/5) * 4 steps
1226TotalSamplesPerStep: stats.TotalSamplesPerStep{
1227201000: 36,
1228206000: 36,
1229211000: 36,
1230216000: 36,
1231},
1232},
1233{
1234Query: "max_over_time(metricWith1SampleEvery10Seconds[60s:5s])",
1235Start: time.Unix(201, 0),
1236End: time.Unix(220, 0),
1237Interval: 5 * time.Second,
1238PeakSamples: 32,
1239TotalSamples: 48, // 1 sample per query * 12 queries (60/5) * 4 steps
1240TotalSamplesPerStep: stats.TotalSamplesPerStep{
1241201000: 12,
1242206000: 12,
1243211000: 12,
1244216000: 12,
1245},
1246},
1247{
1248Query: "sum by (b) (max_over_time(metricWith1SampleEvery10Seconds[60s:5s]))",
1249Start: time.Unix(201, 0),
1250End: time.Unix(220, 0),
1251Interval: 5 * time.Second,
1252PeakSamples: 32,
1253TotalSamples: 48, // 1 sample per query * 12 queries (60/5) * 4 steps
1254TotalSamplesPerStep: stats.TotalSamplesPerStep{
1255201000: 12,
1256206000: 12,
1257211000: 12,
1258216000: 12,
1259},
1260},
1261{
1262Query: "sum(max_over_time(metricWith3SampleEvery10Seconds[60s:5s])) + sum(max_over_time(metricWith3SampleEvery10Seconds[60s:5s]))",
1263Start: time.Unix(201, 0),
1264End: time.Unix(220, 0),
1265Interval: 5 * time.Second,
1266PeakSamples: 76,
1267TotalSamples: 288, // 2 * (3 sample per query * 12 queries (60/5) * 4 steps)
1268TotalSamplesPerStep: stats.TotalSamplesPerStep{
1269201000: 72,
1270206000: 72,
1271211000: 72,
1272216000: 72,
1273},
1274},
1275{
1276Query: "sum(max_over_time(metricWith3SampleEvery10Seconds[60s:5s])) + sum(max_over_time(metricWith1SampleEvery10Seconds[60s:5s]))",
1277Start: time.Unix(201, 0),
1278End: time.Unix(220, 0),
1279Interval: 5 * time.Second,
1280PeakSamples: 72,
1281TotalSamples: 192, // (1 sample per query * 12 queries (60/5) + 3 sample per query * 12 queries (60/5)) * 4 steps
1282TotalSamplesPerStep: stats.TotalSamplesPerStep{
1283201000: 48,
1284206000: 48,
1285211000: 48,
1286216000: 48,
1287},
1288},
1289}
1290
1291for _, c := range cases {
1292t.Run(c.Query, func(t *testing.T) {
1293opts := promql.NewPrometheusQueryOpts(true, 0)
1294engine := promqltest.NewTestEngine(true, 0, promqltest.DefaultMaxSamplesPerQuery)
1295
1296runQuery := func(expErr error) *stats.Statistics {
1297var err error
1298var qry promql.Query
1299if c.Interval == 0 {
1300qry, err = engine.NewInstantQuery(context.Background(), storage, opts, c.Query, c.Start)
1301} else {
1302qry, err = engine.NewRangeQuery(context.Background(), storage, opts, c.Query, c.Start, c.End, c.Interval)
1303}
1304require.NoError(t, err)
1305
1306res := qry.Exec(context.Background())
1307require.Equal(t, expErr, res.Err)
1308
1309return qry.Stats()
1310}
1311
1312stats := runQuery(nil)
1313require.Equal(t, c.TotalSamples, stats.Samples.TotalSamples, "Total samples mismatch")
1314require.Equal(t, &c.TotalSamplesPerStep, stats.Samples.TotalSamplesPerStepMap(), "Total samples per time mismatch")
1315require.Equal(t, c.PeakSamples, stats.Samples.PeakSamples, "Peak samples mismatch")
1316
1317// Check that the peak is correct by setting the max to one less.
1318if c.SkipMaxCheck {
1319return
1320}
1321engine = promqltest.NewTestEngine(true, 0, stats.Samples.PeakSamples-1)
1322runQuery(promql.ErrTooManySamples(env))
1323})
1324}
1325}
1326
1327func TestMaxQuerySamples(t *testing.T) {
1328storage := promqltest.LoadedStorage(t, `
1329load 10s
1330metric 1+1x100
1331bigmetric{a="1"} 1+1x100
1332bigmetric{a="2"} 1+1x100
1333`)
1334t.Cleanup(func() { storage.Close() })
1335
1336// These test cases should be touching the limit exactly (hence no exceeding).
1337// Exceeding the limit will be tested by doing -1 to the MaxSamples.
1338cases := []struct {
1339Query string
1340MaxSamples int
1341Start time.Time
1342End time.Time
1343Interval time.Duration
1344}{
1345// Instant queries.
1346{
1347Query: "1",
1348MaxSamples: 1,
1349Start: time.Unix(1, 0),
1350},
1351{
1352Query: "metric",
1353MaxSamples: 1,
1354Start: time.Unix(1, 0),
1355},
1356{
1357Query: "metric[20s]",
1358MaxSamples: 2,
1359Start: time.Unix(10, 0),
1360},
1361{
1362Query: "rate(metric[20s])",
1363MaxSamples: 3,
1364Start: time.Unix(10, 0),
1365},
1366{
1367Query: "metric[20s:5s]",
1368MaxSamples: 3,
1369Start: time.Unix(10, 0),
1370},
1371{
1372Query: "metric[20s] @ 10",
1373MaxSamples: 2,
1374Start: time.Unix(0, 0),
1375},
1376// Range queries.
1377{
1378Query: "1",
1379MaxSamples: 3,
1380Start: time.Unix(0, 0),
1381End: time.Unix(2, 0),
1382Interval: time.Second,
1383},
1384{
1385Query: "1",
1386MaxSamples: 3,
1387Start: time.Unix(0, 0),
1388End: time.Unix(2, 0),
1389Interval: time.Second,
1390},
1391{
1392Query: "metric",
1393MaxSamples: 3,
1394Start: time.Unix(0, 0),
1395End: time.Unix(2, 0),
1396Interval: time.Second,
1397},
1398{
1399Query: "metric",
1400MaxSamples: 3,
1401Start: time.Unix(0, 0),
1402End: time.Unix(10, 0),
1403Interval: 5 * time.Second,
1404},
1405{
1406Query: "rate(bigmetric[1s])",
1407MaxSamples: 1,
1408Start: time.Unix(0, 0),
1409End: time.Unix(10, 0),
1410Interval: 5 * time.Second,
1411},
1412{
1413// Result is duplicated, so @ also produces 3 samples.
1414Query: "metric @ 10",
1415MaxSamples: 3,
1416Start: time.Unix(0, 0),
1417End: time.Unix(10, 0),
1418Interval: 5 * time.Second,
1419},
1420{
1421// The peak samples in memory is during the first evaluation:
1422// - Subquery takes 22 samples, 11 for each bigmetric,
1423// - Result is calculated per series where the series samples is buffered, hence 11 more here.
1424// - The result of two series is added before the last series buffer is discarded, so 2 more here.
1425// Hence at peak it is 22 (subquery) + 11 (buffer of a series) + 2 (result from 2 series).
1426// The subquery samples and the buffer is discarded before duplicating.
1427Query: `rate(bigmetric[10s:1s] @ 10)`,
1428MaxSamples: 35,
1429Start: time.Unix(0, 0),
1430End: time.Unix(10, 0),
1431Interval: 5 * time.Second,
1432},
1433{
1434// Here the reasoning is same as above. But LHS and RHS are done one after another.
1435// So while one of them takes 35 samples at peak, we need to hold the 2 sample
1436// result of the other till then.
1437Query: `rate(bigmetric[10s:1s] @ 10) + rate(bigmetric[10s:1s] @ 30)`,
1438MaxSamples: 37,
1439Start: time.Unix(0, 0),
1440End: time.Unix(10, 0),
1441Interval: 5 * time.Second,
1442},
1443{
1444// promql.Sample as above but with only 1 part as step invariant.
1445// Here the peak is caused by the non-step invariant part as it touches more time range.
1446// Hence at peak it is 2*21 (subquery from 0s to 20s)
1447// + 11 (buffer of a series per evaluation)
1448// + 6 (result from 2 series at 3 eval times).
1449Query: `rate(bigmetric[10s:1s]) + rate(bigmetric[10s:1s] @ 30)`,
1450MaxSamples: 59,
1451Start: time.Unix(10, 0),
1452End: time.Unix(20, 0),
1453Interval: 5 * time.Second,
1454},
1455{
1456// Nested subquery.
1457// We saw that innermost rate takes 35 samples which is still the peak
1458// since the other two subqueries just duplicate the result.
1459Query: `rate(rate(bigmetric[10s:1s] @ 10)[100s:25s] @ 1000)[100s:20s] @ 2000`,
1460MaxSamples: 35,
1461Start: time.Unix(10, 0),
1462},
1463{
1464// Nested subquery.
1465// Now the outmost subquery produces more samples than inner most rate.
1466Query: `rate(rate(bigmetric[10s:1s] @ 10)[100s:25s] @ 1000)[17s:1s] @ 2000`,
1467MaxSamples: 36,
1468Start: time.Unix(10, 0),
1469},
1470}
1471
1472for _, c := range cases {
1473t.Run(c.Query, func(t *testing.T) {
1474engine := newTestEngine()
1475testFunc := func(expError error) {
1476var err error
1477var qry promql.Query
1478if c.Interval == 0 {
1479qry, err = engine.NewInstantQuery(context.Background(), storage, nil, c.Query, c.Start)
1480} else {
1481qry, err = engine.NewRangeQuery(context.Background(), storage, nil, c.Query, c.Start, c.End, c.Interval)
1482}
1483require.NoError(t, err)
1484
1485res := qry.Exec(context.Background())
1486stats := qry.Stats()
1487require.Equal(t, expError, res.Err)
1488require.NotNil(t, stats)
1489if expError == nil {
1490require.Equal(t, c.MaxSamples, stats.Samples.PeakSamples, "peak samples mismatch for query %q", c.Query)
1491}
1492}
1493
1494// Within limit.
1495engine = promqltest.NewTestEngine(false, 0, c.MaxSamples)
1496testFunc(nil)
1497
1498// Exceeding limit.
1499engine = promqltest.NewTestEngine(false, 0, c.MaxSamples-1)
1500testFunc(promql.ErrTooManySamples(env))
1501})
1502}
1503}
1504
1505func TestAtModifier(t *testing.T) {
1506engine := newTestEngine()
1507storage := promqltest.LoadedStorage(t, `
1508load 10s
1509metric{job="1"} 0+1x1000
1510metric{job="2"} 0+2x1000
1511metric_topk{instance="1"} 0+1x1000
1512metric_topk{instance="2"} 0+2x1000
1513metric_topk{instance="3"} 1000-1x1000
1514
1515load 1ms
1516metric_ms 0+1x10000
1517`)
1518t.Cleanup(func() { storage.Close() })
1519
1520lbls1 := labels.FromStrings("__name__", "metric", "job", "1")
1521lbls2 := labels.FromStrings("__name__", "metric", "job", "2")
1522lblstopk2 := labels.FromStrings("__name__", "metric_topk", "instance", "2")
1523lblstopk3 := labels.FromStrings("__name__", "metric_topk", "instance", "3")
1524lblsms := labels.FromStrings("__name__", "metric_ms")
1525lblsneg := labels.FromStrings("__name__", "metric_neg")
1526
1527// Add some samples with negative timestamp.
1528db := storage.DB
1529app := db.Appender(context.Background())
1530ref, err := app.Append(0, lblsneg, -1000000, 1000)
1531require.NoError(t, err)
1532for ts := int64(-1000000 + 1000); ts <= 0; ts += 1000 {
1533_, err := app.Append(ref, labels.EmptyLabels(), ts, -float64(ts/1000)+1)
1534require.NoError(t, err)
1535}
1536
1537// To test the fix for https://github.com/prometheus/prometheus/issues/8433.
1538_, err = app.Append(0, labels.FromStrings("__name__", "metric_timestamp"), 3600*1000, 1000)
1539require.NoError(t, err)
1540
1541require.NoError(t, app.Commit())
1542
1543cases := []struct {
1544query string
1545start, end, interval int64 // Time in seconds.
1546result parser.Value
1547}{
1548{ // Time of the result is the evaluation time.
1549query: `metric_neg @ 0`,
1550start: 100,
1551result: promql.Vector{
1552promql.Sample{F: 1, T: 100000, Metric: lblsneg},
1553},
1554}, {
1555query: `metric_neg @ -200`,
1556start: 100,
1557result: promql.Vector{
1558promql.Sample{F: 201, T: 100000, Metric: lblsneg},
1559},
1560}, {
1561query: `metric{job="2"} @ 50`,
1562start: -2, end: 2, interval: 1,
1563result: promql.Matrix{
1564promql.Series{
1565Floats: []promql.FPoint{{F: 10, T: -2000}, {F: 10, T: -1000}, {F: 10, T: 0}, {F: 10, T: 1000}, {F: 10, T: 2000}},
1566Metric: lbls2,
1567},
1568},
1569}, { // Timestamps for matrix selector does not depend on the evaluation time.
1570query: "metric[20s] @ 300",
1571start: 10,
1572result: promql.Matrix{
1573promql.Series{
1574Floats: []promql.FPoint{{F: 28, T: 280000}, {F: 29, T: 290000}, {F: 30, T: 300000}},
1575Metric: lbls1,
1576},
1577promql.Series{
1578Floats: []promql.FPoint{{F: 56, T: 280000}, {F: 58, T: 290000}, {F: 60, T: 300000}},
1579Metric: lbls2,
1580},
1581},
1582}, {
1583query: `metric_neg[2s] @ 0`,
1584start: 100,
1585result: promql.Matrix{
1586promql.Series{
1587Floats: []promql.FPoint{{F: 3, T: -2000}, {F: 2, T: -1000}, {F: 1, T: 0}},
1588Metric: lblsneg,
1589},
1590},
1591}, {
1592query: `metric_neg[3s] @ -500`,
1593start: 100,
1594result: promql.Matrix{
1595promql.Series{
1596Floats: []promql.FPoint{{F: 504, T: -503000}, {F: 503, T: -502000}, {F: 502, T: -501000}, {F: 501, T: -500000}},
1597Metric: lblsneg,
1598},
1599},
1600}, {
1601query: `metric_ms[3ms] @ 2.345`,
1602start: 100,
1603result: promql.Matrix{
1604promql.Series{
1605Floats: []promql.FPoint{{F: 2342, T: 2342}, {F: 2343, T: 2343}, {F: 2344, T: 2344}, {F: 2345, T: 2345}},
1606Metric: lblsms,
1607},
1608},
1609}, {
1610query: "metric[100s:25s] @ 300",
1611start: 100,
1612result: promql.Matrix{
1613promql.Series{
1614Floats: []promql.FPoint{{F: 20, T: 200000}, {F: 22, T: 225000}, {F: 25, T: 250000}, {F: 27, T: 275000}, {F: 30, T: 300000}},
1615Metric: lbls1,
1616},
1617promql.Series{
1618Floats: []promql.FPoint{{F: 40, T: 200000}, {F: 44, T: 225000}, {F: 50, T: 250000}, {F: 54, T: 275000}, {F: 60, T: 300000}},
1619Metric: lbls2,
1620},
1621},
1622}, {
1623query: "metric_neg[50s:25s] @ 0",
1624start: 100,
1625result: promql.Matrix{
1626promql.Series{
1627Floats: []promql.FPoint{{F: 51, T: -50000}, {F: 26, T: -25000}, {F: 1, T: 0}},
1628Metric: lblsneg,
1629},
1630},
1631}, {
1632query: "metric_neg[50s:25s] @ -100",
1633start: 100,
1634result: promql.Matrix{
1635promql.Series{
1636Floats: []promql.FPoint{{F: 151, T: -150000}, {F: 126, T: -125000}, {F: 101, T: -100000}},
1637Metric: lblsneg,
1638},
1639},
1640}, {
1641query: `metric_ms[100ms:25ms] @ 2.345`,
1642start: 100,
1643result: promql.Matrix{
1644promql.Series{
1645Floats: []promql.FPoint{{F: 2250, T: 2250}, {F: 2275, T: 2275}, {F: 2300, T: 2300}, {F: 2325, T: 2325}},
1646Metric: lblsms,
1647},
1648},
1649}, {
1650query: `metric_topk and topk(1, sum_over_time(metric_topk[50s] @ 100))`,
1651start: 50, end: 80, interval: 10,
1652result: promql.Matrix{
1653promql.Series{
1654Floats: []promql.FPoint{{F: 995, T: 50000}, {F: 994, T: 60000}, {F: 993, T: 70000}, {F: 992, T: 80000}},
1655Metric: lblstopk3,
1656},
1657},
1658}, {
1659query: `metric_topk and topk(1, sum_over_time(metric_topk[50s] @ 5000))`,
1660start: 50, end: 80, interval: 10,
1661result: promql.Matrix{
1662promql.Series{
1663Floats: []promql.FPoint{{F: 10, T: 50000}, {F: 12, T: 60000}, {F: 14, T: 70000}, {F: 16, T: 80000}},
1664Metric: lblstopk2,
1665},
1666},
1667}, {
1668query: `metric_topk and topk(1, sum_over_time(metric_topk[50s] @ end()))`,
1669start: 70, end: 100, interval: 10,
1670result: promql.Matrix{
1671promql.Series{
1672Floats: []promql.FPoint{{F: 993, T: 70000}, {F: 992, T: 80000}, {F: 991, T: 90000}, {F: 990, T: 100000}},
1673Metric: lblstopk3,
1674},
1675},
1676}, {
1677query: `metric_topk and topk(1, sum_over_time(metric_topk[50s] @ start()))`,
1678start: 100, end: 130, interval: 10,
1679result: promql.Matrix{
1680promql.Series{
1681Floats: []promql.FPoint{{F: 990, T: 100000}, {F: 989, T: 110000}, {F: 988, T: 120000}, {F: 987, T: 130000}},
1682Metric: lblstopk3,
1683},
1684},
1685}, {
1686// Tests for https://github.com/prometheus/prometheus/issues/8433.
1687// The trick here is that the query range should be > lookback delta.
1688query: `timestamp(metric_timestamp @ 3600)`,
1689start: 0, end: 7 * 60, interval: 60,
1690result: promql.Matrix{
1691promql.Series{
1692Floats: []promql.FPoint{
1693{F: 3600, T: 0},
1694{F: 3600, T: 60 * 1000},
1695{F: 3600, T: 2 * 60 * 1000},
1696{F: 3600, T: 3 * 60 * 1000},
1697{F: 3600, T: 4 * 60 * 1000},
1698{F: 3600, T: 5 * 60 * 1000},
1699{F: 3600, T: 6 * 60 * 1000},
1700{F: 3600, T: 7 * 60 * 1000},
1701},
1702Metric: labels.EmptyLabels(),
1703},
1704},
1705},
1706}
1707
1708for _, c := range cases {
1709t.Run(c.query, func(t *testing.T) {
1710if c.interval == 0 {
1711c.interval = 1
1712}
1713start, end, interval := time.Unix(c.start, 0), time.Unix(c.end, 0), time.Duration(c.interval)*time.Second
1714var err error
1715var qry promql.Query
1716if c.end == 0 {
1717qry, err = engine.NewInstantQuery(context.Background(), storage, nil, c.query, start)
1718} else {
1719qry, err = engine.NewRangeQuery(context.Background(), storage, nil, c.query, start, end, interval)
1720}
1721require.NoError(t, err)
1722
1723res := qry.Exec(context.Background())
1724require.NoError(t, res.Err)
1725if expMat, ok := c.result.(promql.Matrix); ok {
1726sort.Sort(expMat)
1727sort.Sort(res.Value.(promql.Matrix))
1728}
1729testutil.RequireEqual(t, c.result, res.Value, "query %q failed", c.query)
1730})
1731}
1732}
1733
1734func TestSubquerySelector(t *testing.T) {
1735type caseType struct {
1736Query string
1737Result promql.Result
1738Start time.Time
1739}
1740
1741for _, tst := range []struct {
1742loadString string
1743cases []caseType
1744}{
1745{
1746loadString: `load 10s
1747metric 1 2`,
1748cases: []caseType{
1749{
1750Query: "metric[20s:10s]",
1751Result: promql.Result{
1752nil,
1753promql.Matrix{
1754promql.Series{
1755Floats: []promql.FPoint{{F: 1, T: 0}, {F: 2, T: 10000}},
1756Metric: labels.FromStrings("__name__", "metric"),
1757},
1758},
1759nil,
1760},
1761Start: time.Unix(10, 0),
1762},
1763{
1764Query: "metric[20s:5s]",
1765Result: promql.Result{
1766nil,
1767promql.Matrix{
1768promql.Series{
1769Floats: []promql.FPoint{{F: 1, T: 0}, {F: 1, T: 5000}, {F: 2, T: 10000}},
1770Metric: labels.FromStrings("__name__", "metric"),
1771},
1772},
1773nil,
1774},
1775Start: time.Unix(10, 0),
1776},
1777{
1778Query: "metric[20s:5s] offset 2s",
1779Result: promql.Result{
1780nil,
1781promql.Matrix{
1782promql.Series{
1783Floats: []promql.FPoint{{F: 1, T: 0}, {F: 1, T: 5000}, {F: 2, T: 10000}},
1784Metric: labels.FromStrings("__name__", "metric"),
1785},
1786},
1787nil,
1788},
1789Start: time.Unix(12, 0),
1790},
1791{
1792Query: "metric[20s:5s] offset 6s",
1793Result: promql.Result{
1794nil,
1795promql.Matrix{
1796promql.Series{
1797Floats: []promql.FPoint{{F: 1, T: 0}, {F: 1, T: 5000}, {F: 2, T: 10000}},
1798Metric: labels.FromStrings("__name__", "metric"),
1799},
1800},
1801nil,
1802},
1803Start: time.Unix(20, 0),
1804},
1805{
1806Query: "metric[20s:5s] offset 4s",
1807Result: promql.Result{
1808nil,
1809promql.Matrix{
1810promql.Series{
1811Floats: []promql.FPoint{{F: 2, T: 15000}, {F: 2, T: 20000}, {F: 2, T: 25000}, {F: 2, T: 30000}},
1812Metric: labels.FromStrings("__name__", "metric"),
1813},
1814},
1815nil,
1816},
1817Start: time.Unix(35, 0),
1818},
1819{
1820Query: "metric[20s:5s] offset 5s",
1821Result: promql.Result{
1822nil,
1823promql.Matrix{
1824promql.Series{
1825Floats: []promql.FPoint{{F: 2, T: 10000}, {F: 2, T: 15000}, {F: 2, T: 20000}, {F: 2, T: 25000}, {F: 2, T: 30000}},
1826Metric: labels.FromStrings("__name__", "metric"),
1827},
1828},
1829nil,
1830},
1831Start: time.Unix(35, 0),
1832},
1833{
1834Query: "metric[20s:5s] offset 6s",
1835Result: promql.Result{
1836nil,
1837promql.Matrix{
1838promql.Series{
1839Floats: []promql.FPoint{{F: 2, T: 10000}, {F: 2, T: 15000}, {F: 2, T: 20000}, {F: 2, T: 25000}},
1840Metric: labels.FromStrings("__name__", "metric"),
1841},
1842},
1843nil,
1844},
1845Start: time.Unix(35, 0),
1846},
1847{
1848Query: "metric[20s:5s] offset 7s",
1849Result: promql.Result{
1850nil,
1851promql.Matrix{
1852promql.Series{
1853Floats: []promql.FPoint{{F: 2, T: 10000}, {F: 2, T: 15000}, {F: 2, T: 20000}, {F: 2, T: 25000}},
1854Metric: labels.FromStrings("__name__", "metric"),
1855},
1856},
1857nil,
1858},
1859Start: time.Unix(35, 0),
1860},
1861},
1862},
1863{
1864loadString: `load 10s
1865http_requests{job="api-server", instance="0", group="production"} 0+10x1000 100+30x1000
1866http_requests{job="api-server", instance="1", group="production"} 0+20x1000 200+30x1000
1867http_requests{job="api-server", instance="0", group="canary"} 0+30x1000 300+80x1000
1868http_requests{job="api-server", instance="1", group="canary"} 0+40x2000`,
1869cases: []caseType{
1870{ // Normal selector.
1871Query: `http_requests{group=~"pro.*",instance="0"}[30s:10s]`,
1872Result: promql.Result{
1873nil,
1874promql.Matrix{
1875promql.Series{
1876Floats: []promql.FPoint{{F: 9990, T: 9990000}, {F: 10000, T: 10000000}, {F: 100, T: 10010000}, {F: 130, T: 10020000}},
1877Metric: labels.FromStrings("__name__", "http_requests", "job", "api-server", "instance", "0", "group", "production"),
1878},
1879},
1880nil,
1881},
1882Start: time.Unix(10020, 0),
1883},
1884{ // Default step.
1885Query: `http_requests{group=~"pro.*",instance="0"}[5m:]`,
1886Result: promql.Result{
1887nil,
1888promql.Matrix{
1889promql.Series{
1890Floats: []promql.FPoint{{F: 9840, T: 9840000}, {F: 9900, T: 9900000}, {F: 9960, T: 9960000}, {F: 130, T: 10020000}, {F: 310, T: 10080000}},
1891Metric: labels.FromStrings("__name__", "http_requests", "job", "api-server", "instance", "0", "group", "production"),
1892},
1893},
1894nil,
1895},
1896Start: time.Unix(10100, 0),
1897},
1898{ // Check that an offset larger than the lookback delta is handled correctly.
1899Query: `http_requests{group=~"pro.*",instance="0"}[5m:] offset 20m`,
1900Result: promql.Result{
1901nil,
1902promql.Matrix{
1903promql.Series{
1904Floats: []promql.FPoint{{F: 8640, T: 8640000}, {F: 8700, T: 8700000}, {F: 8760, T: 8760000}, {F: 8820, T: 8820000}, {F: 8880, T: 8880000}},
1905Metric: labels.FromStrings("__name__", "http_requests", "job", "api-server", "instance", "0", "group", "production"),
1906},
1907},
1908nil,
1909},
1910Start: time.Unix(10100, 0),
1911},
1912{
1913Query: `rate(http_requests[1m])[15s:5s]`,
1914Result: promql.Result{
1915nil,
1916promql.Matrix{
1917promql.Series{
1918Floats: []promql.FPoint{{F: 3, T: 7985000}, {F: 3, T: 7990000}, {F: 3, T: 7995000}, {F: 3, T: 8000000}},
1919Metric: labels.FromStrings("job", "api-server", "instance", "0", "group", "canary"),
1920},
1921promql.Series{
1922Floats: []promql.FPoint{{F: 4, T: 7985000}, {F: 4, T: 7990000}, {F: 4, T: 7995000}, {F: 4, T: 8000000}},
1923Metric: labels.FromStrings("job", "api-server", "instance", "1", "group", "canary"),
1924},
1925promql.Series{
1926Floats: []promql.FPoint{{F: 1, T: 7985000}, {F: 1, T: 7990000}, {F: 1, T: 7995000}, {F: 1, T: 8000000}},
1927Metric: labels.FromStrings("job", "api-server", "instance", "0", "group", "production"),
1928},
1929promql.Series{
1930Floats: []promql.FPoint{{F: 2, T: 7985000}, {F: 2, T: 7990000}, {F: 2, T: 7995000}, {F: 2, T: 8000000}},
1931Metric: labels.FromStrings("job", "api-server", "instance", "1", "group", "production"),
1932},
1933},
1934nil,
1935},
1936Start: time.Unix(8000, 0),
1937},
1938{
1939Query: `sum(http_requests{group=~"pro.*"})[30s:10s]`,
1940Result: promql.Result{
1941nil,
1942promql.Matrix{
1943promql.Series{
1944Floats: []promql.FPoint{{F: 270, T: 90000}, {F: 300, T: 100000}, {F: 330, T: 110000}, {F: 360, T: 120000}},
1945Metric: labels.EmptyLabels(),
1946},
1947},
1948nil,
1949},
1950Start: time.Unix(120, 0),
1951},
1952{
1953Query: `sum(http_requests)[40s:10s]`,
1954Result: promql.Result{
1955nil,
1956promql.Matrix{
1957promql.Series{
1958Floats: []promql.FPoint{{F: 800, T: 80000}, {F: 900, T: 90000}, {F: 1000, T: 100000}, {F: 1100, T: 110000}, {F: 1200, T: 120000}},
1959Metric: labels.EmptyLabels(),
1960},
1961},
1962nil,
1963},
1964Start: time.Unix(120, 0),
1965},
1966{
1967Query: `(sum(http_requests{group=~"p.*"})+sum(http_requests{group=~"c.*"}))[20s:5s]`,
1968Result: promql.Result{
1969nil,
1970promql.Matrix{
1971promql.Series{
1972Floats: []promql.FPoint{{F: 1000, T: 100000}, {F: 1000, T: 105000}, {F: 1100, T: 110000}, {F: 1100, T: 115000}, {F: 1200, T: 120000}},
1973Metric: labels.EmptyLabels(),
1974},
1975},
1976nil,
1977},
1978Start: time.Unix(120, 0),
1979},
1980},
1981},
1982} {
1983t.Run("", func(t *testing.T) {
1984engine := newTestEngine()
1985storage := promqltest.LoadedStorage(t, tst.loadString)
1986t.Cleanup(func() { storage.Close() })
1987
1988for _, c := range tst.cases {
1989t.Run(c.Query, func(t *testing.T) {
1990qry, err := engine.NewInstantQuery(context.Background(), storage, nil, c.Query, c.Start)
1991require.NoError(t, err)
1992
1993res := qry.Exec(context.Background())
1994require.Equal(t, c.Result.Err, res.Err)
1995mat := res.Value.(promql.Matrix)
1996sort.Sort(mat)
1997testutil.RequireEqual(t, c.Result.Value, mat)
1998})
1999}
2000})
2001}
2002}
2003
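// TestTimestampFunction_StepsMoreOftenThanSamples verifies that timestamp()
// returns the timestamp of the underlying sample rather than the evaluation
// time when the query resolution step (1s) is finer than the scrape interval (1m).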
2004func TestTimestampFunction_StepsMoreOftenThanSamples(t *testing.T) {
2005engine := newTestEngine()
2006storage := promqltest.LoadedStorage(t, `
2007load 1m
2008metric 0+1x1000
2009`)
2010t.Cleanup(func() { storage.Close() })
2011
2012query := "timestamp(metric)"
2013start := time.Unix(0, 0)
2014end := time.Unix(61, 0)
2015interval := time.Second
2016
2017// We expect the value to be 0 for t=0s to t=59s (inclusive), then 60 for t=60s and t=61s.
2018expectedPoints := []promql.FPoint{}
2019
2020for t := 0; t <= 59; t++ {
2021expectedPoints = append(expectedPoints, promql.FPoint{F: 0, T: int64(t * 1000)})
2022}
2023
2024expectedPoints = append(
2025expectedPoints,
2026promql.FPoint{F: 60, T: 60_000},
2027promql.FPoint{F: 60, T: 61_000},
2028)
2029
2030expectedResult := promql.Matrix{
2031promql.Series{
2032Floats: expectedPoints,
2033Metric: labels.EmptyLabels(),
2034},
2035}
2036
2037qry, err := engine.NewRangeQuery(context.Background(), storage, nil, query, start, end, interval)
2038require.NoError(t, err)
2039
2040res := qry.Exec(context.Background())
2041require.NoError(t, res.Err)
2042testutil.RequireEqual(t, expectedResult, res.Value)
2043}
2044
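// FakeQueryLogger is a test double for the engine's query logger: it records
// every logged field in logs and notes whether Close was called.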
2045type FakeQueryLogger struct {
2046closed bool
2047logs []interface{}
2048}
2049
2050func NewFakeQueryLogger() *FakeQueryLogger {
2051return &FakeQueryLogger{
2052closed: false,
2053logs: make([]interface{}, 0),
2054}
2055}
2056
2057func (f *FakeQueryLogger) Close() error {
2058f.closed = true
2059return nil
2060}
2061
2062func (f *FakeQueryLogger) Log(l ...interface{}) error {
2063f.logs = append(f.logs, l...)
2064return nil
2065}
2066
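// TestQueryLogger_basic checks that queries run with and without a query
// logger set, and that the engine closes a logger when it is unset or swapped.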
2067func TestQueryLogger_basic(t *testing.T) {
2068opts := promql.EngineOpts{
2069Logger: nil,
2070Reg: nil,
2071MaxSamples: 10,
2072Timeout: 10 * time.Second,
2073}
2074engine := promql.NewEngine(opts)
2075
2076queryExec := func() {
2077ctx, cancelCtx := context.WithCancel(context.Background())
2078defer cancelCtx()
2079query := engine.NewTestQuery(func(ctx context.Context) error {
2080return contextDone(ctx, "test statement execution")
2081})
2082res := query.Exec(ctx)
2083require.NoError(t, res.Err)
2084}
2085
2086// promql.Query works without a query logger configured.
2087queryExec()
2088
2089f1 := NewFakeQueryLogger()
2090engine.SetQueryLogger(f1)
2091queryExec()
2092for i, field := range []interface{}{"params", map[string]interface{}{"query": "test statement"}} {
2093require.Equal(t, field, f1.logs[i])
2094}
2095
2096l := len(f1.logs)
2097queryExec()
2098require.Len(t, f1.logs, 2*l)
2099
2100// Test that we close the query logger when unsetting it.
2101require.False(t, f1.closed, "expected f1 to be open, got closed")
2102engine.SetQueryLogger(nil)
2103require.True(t, f1.closed, "expected f1 to be closed, got open")
2104queryExec()
2105
2106// Test that we close the query logger when swapping.
2107f2 := NewFakeQueryLogger()
2108f3 := NewFakeQueryLogger()
2109engine.SetQueryLogger(f2)
2110require.False(t, f2.closed, "expected f2 to be open, got closed")
2111queryExec()
2112engine.SetQueryLogger(f3)
2113require.True(t, f2.closed, "expected f2 to be closed, got open")
2114require.False(t, f3.closed, "expected f3 to be open, got closed")
2115queryExec()
2116}
2117
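// TestQueryLogger_fields checks that fields attached to the query origin
// context are forwarded to the query logger.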
2118func TestQueryLogger_fields(t *testing.T) {
2119opts := promql.EngineOpts{
2120Logger: nil,
2121Reg: nil,
2122MaxSamples: 10,
2123Timeout: 10 * time.Second,
2124}
2125engine := promql.NewEngine(opts)
2126
2127f1 := NewFakeQueryLogger()
2128engine.SetQueryLogger(f1)
2129
2130ctx, cancelCtx := context.WithCancel(context.Background())
2131ctx = promql.NewOriginContext(ctx, map[string]interface{}{"foo": "bar"})
2132defer cancelCtx()
2133query := engine.NewTestQuery(func(ctx context.Context) error {
2134return contextDone(ctx, "test statement execution")
2135})
2136
2137res := query.Exec(ctx)
2138require.NoError(t, res.Err)
2139
2140expected := []string{"foo", "bar"}
2141for i, field := range expected {
2142v := f1.logs[len(f1.logs)-len(expected)+i].(string)
2143require.Equal(t, field, v)
2144}
2145}
2146
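// TestQueryLogger_error checks that a failed query logs its error alongside
// the query parameters.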
2147func TestQueryLogger_error(t *testing.T) {
2148opts := promql.EngineOpts{
2149Logger: nil,
2150Reg: nil,
2151MaxSamples: 10,
2152Timeout: 10 * time.Second,
2153}
2154engine := promql.NewEngine(opts)
2155
2156f1 := NewFakeQueryLogger()
2157engine.SetQueryLogger(f1)
2158
2159ctx, cancelCtx := context.WithCancel(context.Background())
2160ctx = promql.NewOriginContext(ctx, map[string]interface{}{"foo": "bar"})
2161defer cancelCtx()
2162testErr := errors.New("failure")
2163query := engine.NewTestQuery(func(ctx context.Context) error {
2164return testErr
2165})
2166
2167res := query.Exec(ctx)
2168require.Error(t, res.Err, "query should have failed")
2169
2170for i, field := range []interface{}{"params", map[string]interface{}{"query": "test statement"}, "error", testErr} {
2171require.Equal(t, field, f1.logs[i])
2172}
2173}
2174
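// TestPreprocessAndWrapWithStepInvariantExpr checks that PreprocessExpr wraps
// step-invariant sub-expressions (number and string literals, and expressions
// pinned by the @ modifier) in parser.StepInvariantExpr nodes while leaving
// step-dependent parts untouched.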
2175func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) {
2176startTime := time.Unix(1000, 0)
2177endTime := time.Unix(9999, 0)
2178testCases := []struct {
2179input string // The input to be parsed.
2180expected parser.Expr // The expected expression AST.
2181outputTest bool
2182}{
2183{
2184input: "123.4567",
2185expected: &parser.StepInvariantExpr{
2186Expr: &parser.NumberLiteral{
2187Val: 123.4567,
2188PosRange: posrange.PositionRange{Start: 0, End: 8},
2189},
2190},
2191},
2192{
2193input: `"foo"`,
2194expected: &parser.StepInvariantExpr{
2195Expr: &parser.StringLiteral{
2196Val: "foo",
2197PosRange: posrange.PositionRange{Start: 0, End: 5},
2198},
2199},
2200},
2201{
2202input: "foo * bar",
2203expected: &parser.BinaryExpr{
2204Op: parser.MUL,
2205LHS: &parser.VectorSelector{
2206Name: "foo",
2207LabelMatchers: []*labels.Matcher{
2208parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2209},
2210PosRange: posrange.PositionRange{
2211Start: 0,
2212End: 3,
2213},
2214},
2215RHS: &parser.VectorSelector{
2216Name: "bar",
2217LabelMatchers: []*labels.Matcher{
2218parser.MustLabelMatcher(labels.MatchEqual, "__name__", "bar"),
2219},
2220PosRange: posrange.PositionRange{
2221Start: 6,
2222End: 9,
2223},
2224},
2225VectorMatching: &parser.VectorMatching{Card: parser.CardOneToOne},
2226},
2227},
2228{
2229input: "foo * bar @ 10",
2230expected: &parser.BinaryExpr{
2231Op: parser.MUL,
2232LHS: &parser.VectorSelector{
2233Name: "foo",
2234LabelMatchers: []*labels.Matcher{
2235parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2236},
2237PosRange: posrange.PositionRange{
2238Start: 0,
2239End: 3,
2240},
2241},
2242RHS: &parser.StepInvariantExpr{
2243Expr: &parser.VectorSelector{
2244Name: "bar",
2245LabelMatchers: []*labels.Matcher{
2246parser.MustLabelMatcher(labels.MatchEqual, "__name__", "bar"),
2247},
2248PosRange: posrange.PositionRange{
2249Start: 6,
2250End: 14,
2251},
2252Timestamp: makeInt64Pointer(10000),
2253},
2254},
2255VectorMatching: &parser.VectorMatching{Card: parser.CardOneToOne},
2256},
2257},
2258{
2259input: "foo @ 20 * bar @ 10",
2260expected: &parser.StepInvariantExpr{
2261Expr: &parser.BinaryExpr{
2262Op: parser.MUL,
2263LHS: &parser.VectorSelector{
2264Name: "foo",
2265LabelMatchers: []*labels.Matcher{
2266parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2267},
2268PosRange: posrange.PositionRange{
2269Start: 0,
2270End: 8,
2271},
2272Timestamp: makeInt64Pointer(20000),
2273},
2274RHS: &parser.VectorSelector{
2275Name: "bar",
2276LabelMatchers: []*labels.Matcher{
2277parser.MustLabelMatcher(labels.MatchEqual, "__name__", "bar"),
2278},
2279PosRange: posrange.PositionRange{
2280Start: 11,
2281End: 19,
2282},
2283Timestamp: makeInt64Pointer(10000),
2284},
2285VectorMatching: &parser.VectorMatching{Card: parser.CardOneToOne},
2286},
2287},
2288},
2289{
2290input: "test[5s]",
2291expected: &parser.MatrixSelector{
2292VectorSelector: &parser.VectorSelector{
2293Name: "test",
2294LabelMatchers: []*labels.Matcher{
2295parser.MustLabelMatcher(labels.MatchEqual, "__name__", "test"),
2296},
2297PosRange: posrange.PositionRange{
2298Start: 0,
2299End: 4,
2300},
2301},
2302Range: 5 * time.Second,
2303EndPos: 8,
2304},
2305},
2306{
2307input: `test{a="b"}[5y] @ 1603774699`,
2308expected: &parser.StepInvariantExpr{
2309Expr: &parser.MatrixSelector{
2310VectorSelector: &parser.VectorSelector{
2311Name: "test",
2312Timestamp: makeInt64Pointer(1603774699000),
2313LabelMatchers: []*labels.Matcher{
2314parser.MustLabelMatcher(labels.MatchEqual, "a", "b"),
2315parser.MustLabelMatcher(labels.MatchEqual, "__name__", "test"),
2316},
2317PosRange: posrange.PositionRange{
2318Start: 0,
2319End: 11,
2320},
2321},
2322Range: 5 * 365 * 24 * time.Hour,
2323EndPos: 28,
2324},
2325},
2326},
2327{
2328input: "sum by (foo)(some_metric)",
2329expected: &parser.AggregateExpr{
2330Op: parser.SUM,
2331Expr: &parser.VectorSelector{
2332Name: "some_metric",
2333LabelMatchers: []*labels.Matcher{
2334parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2335},
2336PosRange: posrange.PositionRange{
2337Start: 13,
2338End: 24,
2339},
2340},
2341Grouping: []string{"foo"},
2342PosRange: posrange.PositionRange{
2343Start: 0,
2344End: 25,
2345},
2346},
2347},
2348{
2349input: "sum by (foo)(some_metric @ 10)",
2350expected: &parser.StepInvariantExpr{
2351Expr: &parser.AggregateExpr{
2352Op: parser.SUM,
2353Expr: &parser.VectorSelector{
2354Name: "some_metric",
2355LabelMatchers: []*labels.Matcher{
2356parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2357},
2358PosRange: posrange.PositionRange{
2359Start: 13,
2360End: 29,
2361},
2362Timestamp: makeInt64Pointer(10000),
2363},
2364Grouping: []string{"foo"},
2365PosRange: posrange.PositionRange{
2366Start: 0,
2367End: 30,
2368},
2369},
2370},
2371},
2372{
2373input: "sum(some_metric1 @ 10) + sum(some_metric2 @ 20)",
2374expected: &parser.StepInvariantExpr{
2375Expr: &parser.BinaryExpr{
2376Op: parser.ADD,
2377VectorMatching: &parser.VectorMatching{},
2378LHS: &parser.AggregateExpr{
2379Op: parser.SUM,
2380Expr: &parser.VectorSelector{
2381Name: "some_metric1",
2382LabelMatchers: []*labels.Matcher{
2383parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric1"),
2384},
2385PosRange: posrange.PositionRange{
2386Start: 4,
2387End: 21,
2388},
2389Timestamp: makeInt64Pointer(10000),
2390},
2391PosRange: posrange.PositionRange{
2392Start: 0,
2393End: 22,
2394},
2395},
2396RHS: &parser.AggregateExpr{
2397Op: parser.SUM,
2398Expr: &parser.VectorSelector{
2399Name: "some_metric2",
2400LabelMatchers: []*labels.Matcher{
2401parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric2"),
2402},
2403PosRange: posrange.PositionRange{
2404Start: 29,
2405End: 46,
2406},
2407Timestamp: makeInt64Pointer(20000),
2408},
2409PosRange: posrange.PositionRange{
2410Start: 25,
2411End: 47,
2412},
2413},
2414},
2415},
2416},
2417{
2418input: "some_metric and topk(5, rate(some_metric[1m] @ 20))",
2419expected: &parser.BinaryExpr{
2420Op: parser.LAND,
2421VectorMatching: &parser.VectorMatching{
2422Card: parser.CardManyToMany,
2423},
2424LHS: &parser.VectorSelector{
2425Name: "some_metric",
2426LabelMatchers: []*labels.Matcher{
2427parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2428},
2429PosRange: posrange.PositionRange{
2430Start: 0,
2431End: 11,
2432},
2433},
2434RHS: &parser.StepInvariantExpr{
2435Expr: &parser.AggregateExpr{
2436Op: parser.TOPK,
2437Expr: &parser.Call{
2438Func: parser.MustGetFunction("rate"),
2439Args: parser.Expressions{
2440&parser.MatrixSelector{
2441VectorSelector: &parser.VectorSelector{
2442Name: "some_metric",
2443LabelMatchers: []*labels.Matcher{
2444parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2445},
2446PosRange: posrange.PositionRange{
2447Start: 29,
2448End: 40,
2449},
2450Timestamp: makeInt64Pointer(20000),
2451},
2452Range: 1 * time.Minute,
2453EndPos: 49,
2454},
2455},
2456PosRange: posrange.PositionRange{
2457Start: 24,
2458End: 50,
2459},
2460},
2461Param: &parser.NumberLiteral{
2462Val: 5,
2463PosRange: posrange.PositionRange{
2464Start: 21,
2465End: 22,
2466},
2467},
2468PosRange: posrange.PositionRange{
2469Start: 16,
2470End: 51,
2471},
2472},
2473},
2474},
2475},
2476{
2477input: "time()",
2478expected: &parser.Call{
2479Func: parser.MustGetFunction("time"),
2480Args: parser.Expressions{},
2481PosRange: posrange.PositionRange{
2482Start: 0,
2483End: 6,
2484},
2485},
2486},
2487{
2488input: `foo{bar="baz"}[10m:6s]`,
2489expected: &parser.SubqueryExpr{
2490Expr: &parser.VectorSelector{
2491Name: "foo",
2492LabelMatchers: []*labels.Matcher{
2493parser.MustLabelMatcher(labels.MatchEqual, "bar", "baz"),
2494parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2495},
2496PosRange: posrange.PositionRange{
2497Start: 0,
2498End: 14,
2499},
2500},
2501Range: 10 * time.Minute,
2502Step: 6 * time.Second,
2503EndPos: 22,
2504},
2505},
2506{
2507input: `foo{bar="baz"}[10m:6s] @ 10`,
2508expected: &parser.StepInvariantExpr{
2509Expr: &parser.SubqueryExpr{
2510Expr: &parser.VectorSelector{
2511Name: "foo",
2512LabelMatchers: []*labels.Matcher{
2513parser.MustLabelMatcher(labels.MatchEqual, "bar", "baz"),
2514parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2515},
2516PosRange: posrange.PositionRange{
2517Start: 0,
2518End: 14,
2519},
2520},
2521Range: 10 * time.Minute,
2522Step: 6 * time.Second,
2523Timestamp: makeInt64Pointer(10000),
2524EndPos: 27,
2525},
2526},
2527},
2528{ // Even though the subquery is step invariant, the inside is also wrapped separately.
2529input: `sum(foo{bar="baz"} @ 20)[10m:6s] @ 10`,
2530expected: &parser.StepInvariantExpr{
2531Expr: &parser.SubqueryExpr{
2532Expr: &parser.StepInvariantExpr{
2533Expr: &parser.AggregateExpr{
2534Op: parser.SUM,
2535Expr: &parser.VectorSelector{
2536Name: "foo",
2537LabelMatchers: []*labels.Matcher{
2538parser.MustLabelMatcher(labels.MatchEqual, "bar", "baz"),
2539parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2540},
2541PosRange: posrange.PositionRange{
2542Start: 4,
2543End: 23,
2544},
2545Timestamp: makeInt64Pointer(20000),
2546},
2547PosRange: posrange.PositionRange{
2548Start: 0,
2549End: 24,
2550},
2551},
2552},
2553Range: 10 * time.Minute,
2554Step: 6 * time.Second,
2555Timestamp: makeInt64Pointer(10000),
2556EndPos: 37,
2557},
2558},
2559},
2560{
2561input: `min_over_time(rate(foo{bar="baz"}[2s])[5m:] @ 1603775091)[4m:3s]`,
2562expected: &parser.SubqueryExpr{
2563Expr: &parser.StepInvariantExpr{
2564Expr: &parser.Call{
2565Func: parser.MustGetFunction("min_over_time"),
2566Args: parser.Expressions{
2567&parser.SubqueryExpr{
2568Expr: &parser.Call{
2569Func: parser.MustGetFunction("rate"),
2570Args: parser.Expressions{
2571&parser.MatrixSelector{
2572VectorSelector: &parser.VectorSelector{
2573Name: "foo",
2574LabelMatchers: []*labels.Matcher{
2575parser.MustLabelMatcher(labels.MatchEqual, "bar", "baz"),
2576parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2577},
2578PosRange: posrange.PositionRange{
2579Start: 19,
2580End: 33,
2581},
2582},
2583Range: 2 * time.Second,
2584EndPos: 37,
2585},
2586},
2587PosRange: posrange.PositionRange{
2588Start: 14,
2589End: 38,
2590},
2591},
2592Range: 5 * time.Minute,
2593Timestamp: makeInt64Pointer(1603775091000),
2594EndPos: 56,
2595},
2596},
2597PosRange: posrange.PositionRange{
2598Start: 0,
2599End: 57,
2600},
2601},
2602},
2603Range: 4 * time.Minute,
2604Step: 3 * time.Second,
2605EndPos: 64,
2606},
2607},
2608{
2609input: `some_metric @ 123 offset 1m [10m:5s]`,
2610expected: &parser.SubqueryExpr{
2611Expr: &parser.StepInvariantExpr{
2612Expr: &parser.VectorSelector{
2613Name: "some_metric",
2614LabelMatchers: []*labels.Matcher{
2615parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2616},
2617PosRange: posrange.PositionRange{
2618Start: 0,
2619End: 27,
2620},
2621Timestamp: makeInt64Pointer(123000),
2622OriginalOffset: 1 * time.Minute,
2623},
2624},
2625Range: 10 * time.Minute,
2626Step: 5 * time.Second,
2627EndPos: 36,
2628},
2629},
2630{
2631input: `some_metric[10m:5s] offset 1m @ 123`,
2632expected: &parser.StepInvariantExpr{
2633Expr: &parser.SubqueryExpr{
2634Expr: &parser.VectorSelector{
2635Name: "some_metric",
2636LabelMatchers: []*labels.Matcher{
2637parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2638},
2639PosRange: posrange.PositionRange{
2640Start: 0,
2641End: 11,
2642},
2643},
2644Timestamp: makeInt64Pointer(123000),
2645OriginalOffset: 1 * time.Minute,
2646Range: 10 * time.Minute,
2647Step: 5 * time.Second,
2648EndPos: 35,
2649},
2650},
2651},
2652{
2653input: `(foo + bar{nm="val"} @ 1234)[5m:] @ 1603775019`,
2654expected: &parser.StepInvariantExpr{
2655Expr: &parser.SubqueryExpr{
2656Expr: &parser.ParenExpr{
2657Expr: &parser.BinaryExpr{
2658Op: parser.ADD,
2659VectorMatching: &parser.VectorMatching{
2660Card: parser.CardOneToOne,
2661},
2662LHS: &parser.VectorSelector{
2663Name: "foo",
2664LabelMatchers: []*labels.Matcher{
2665parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2666},
2667PosRange: posrange.PositionRange{
2668Start: 1,
2669End: 4,
2670},
2671},
2672RHS: &parser.StepInvariantExpr{
2673Expr: &parser.VectorSelector{
2674Name: "bar",
2675LabelMatchers: []*labels.Matcher{
2676parser.MustLabelMatcher(labels.MatchEqual, "nm", "val"),
2677parser.MustLabelMatcher(labels.MatchEqual, "__name__", "bar"),
2678},
2679Timestamp: makeInt64Pointer(1234000),
2680PosRange: posrange.PositionRange{
2681Start: 7,
2682End: 27,
2683},
2684},
2685},
2686},
2687PosRange: posrange.PositionRange{
2688Start: 0,
2689End: 28,
2690},
2691},
2692Range: 5 * time.Minute,
2693Timestamp: makeInt64Pointer(1603775019000),
2694EndPos: 46,
2695},
2696},
2697},
2698{
2699input: "abs(abs(metric @ 10))",
2700expected: &parser.StepInvariantExpr{
2701Expr: &parser.Call{
2702Func: &parser.Function{
2703Name: "abs",
2704ArgTypes: []parser.ValueType{parser.ValueTypeVector},
2705ReturnType: parser.ValueTypeVector,
2706},
2707Args: parser.Expressions{&parser.Call{
2708Func: &parser.Function{
2709Name: "abs",
2710ArgTypes: []parser.ValueType{parser.ValueTypeVector},
2711ReturnType: parser.ValueTypeVector,
2712},
2713Args: parser.Expressions{&parser.VectorSelector{
2714Name: "metric",
2715LabelMatchers: []*labels.Matcher{
2716parser.MustLabelMatcher(labels.MatchEqual, "__name__", "metric"),
2717},
2718PosRange: posrange.PositionRange{
2719Start: 8,
2720End: 19,
2721},
2722Timestamp: makeInt64Pointer(10000),
2723}},
2724PosRange: posrange.PositionRange{
2725Start: 4,
2726End: 20,
2727},
2728}},
2729PosRange: posrange.PositionRange{
2730Start: 0,
2731End: 21,
2732},
2733},
2734},
2735},
2736{
2737input: "sum(sum(some_metric1 @ 10) + sum(some_metric2 @ 20))",
2738expected: &parser.StepInvariantExpr{
2739Expr: &parser.AggregateExpr{
2740Op: parser.SUM,
2741Expr: &parser.BinaryExpr{
2742Op: parser.ADD,
2743VectorMatching: &parser.VectorMatching{},
2744LHS: &parser.AggregateExpr{
2745Op: parser.SUM,
2746Expr: &parser.VectorSelector{
2747Name: "some_metric1",
2748LabelMatchers: []*labels.Matcher{
2749parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric1"),
2750},
2751PosRange: posrange.PositionRange{
2752Start: 8,
2753End: 25,
2754},
2755Timestamp: makeInt64Pointer(10000),
2756},
2757PosRange: posrange.PositionRange{
2758Start: 4,
2759End: 26,
2760},
2761},
2762RHS: &parser.AggregateExpr{
2763Op: parser.SUM,
2764Expr: &parser.VectorSelector{
2765Name: "some_metric2",
2766LabelMatchers: []*labels.Matcher{
2767parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric2"),
2768},
2769PosRange: posrange.PositionRange{
2770Start: 33,
2771End: 50,
2772},
2773Timestamp: makeInt64Pointer(20000),
2774},
2775PosRange: posrange.PositionRange{
2776Start: 29,
2777End: 52,
2778},
2779},
2780},
2781PosRange: posrange.PositionRange{
2782Start: 0,
2783End: 52,
2784},
2785},
2786},
2787},
2788{
2789input: `foo @ start()`,
2790expected: &parser.StepInvariantExpr{
2791Expr: &parser.VectorSelector{
2792Name: "foo",
2793LabelMatchers: []*labels.Matcher{
2794parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2795},
2796PosRange: posrange.PositionRange{
2797Start: 0,
2798End: 13,
2799},
2800Timestamp: makeInt64Pointer(timestamp.FromTime(startTime)),
2801StartOrEnd: parser.START,
2802},
2803},
2804},
2805{
2806input: `foo @ end()`,
2807expected: &parser.StepInvariantExpr{
2808Expr: &parser.VectorSelector{
2809Name: "foo",
2810LabelMatchers: []*labels.Matcher{
2811parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2812},
2813PosRange: posrange.PositionRange{
2814Start: 0,
2815End: 11,
2816},
2817Timestamp: makeInt64Pointer(timestamp.FromTime(endTime)),
2818StartOrEnd: parser.END,
2819},
2820},
2821},
2822{
2823input: `test[5y] @ start()`,
2824expected: &parser.StepInvariantExpr{
2825Expr: &parser.MatrixSelector{
2826VectorSelector: &parser.VectorSelector{
2827Name: "test",
2828Timestamp: makeInt64Pointer(timestamp.FromTime(startTime)),
2829StartOrEnd: parser.START,
2830LabelMatchers: []*labels.Matcher{
2831parser.MustLabelMatcher(labels.MatchEqual, "__name__", "test"),
2832},
2833PosRange: posrange.PositionRange{
2834Start: 0,
2835End: 4,
2836},
2837},
2838Range: 5 * 365 * 24 * time.Hour,
2839EndPos: 18,
2840},
2841},
2842},
2843{
2844input: `test[5y] @ end()`,
2845expected: &parser.StepInvariantExpr{
2846Expr: &parser.MatrixSelector{
2847VectorSelector: &parser.VectorSelector{
2848Name: "test",
2849Timestamp: makeInt64Pointer(timestamp.FromTime(endTime)),
2850StartOrEnd: parser.END,
2851LabelMatchers: []*labels.Matcher{
2852parser.MustLabelMatcher(labels.MatchEqual, "__name__", "test"),
2853},
2854PosRange: posrange.PositionRange{
2855Start: 0,
2856End: 4,
2857},
2858},
2859Range: 5 * 365 * 24 * time.Hour,
2860EndPos: 16,
2861},
2862},
2863},
2864{
2865input: `some_metric[10m:5s] @ start()`,
2866expected: &parser.StepInvariantExpr{
2867Expr: &parser.SubqueryExpr{
2868Expr: &parser.VectorSelector{
2869Name: "some_metric",
2870LabelMatchers: []*labels.Matcher{
2871parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2872},
2873PosRange: posrange.PositionRange{
2874Start: 0,
2875End: 11,
2876},
2877},
2878Timestamp: makeInt64Pointer(timestamp.FromTime(startTime)),
2879StartOrEnd: parser.START,
2880Range: 10 * time.Minute,
2881Step: 5 * time.Second,
2882EndPos: 29,
2883},
2884},
2885},
2886{
2887input: `some_metric[10m:5s] @ end()`,
2888expected: &parser.StepInvariantExpr{
2889Expr: &parser.SubqueryExpr{
2890Expr: &parser.VectorSelector{
2891Name: "some_metric",
2892LabelMatchers: []*labels.Matcher{
2893parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2894},
2895PosRange: posrange.PositionRange{
2896Start: 0,
2897End: 11,
2898},
2899},
2900Timestamp: makeInt64Pointer(timestamp.FromTime(endTime)),
2901StartOrEnd: parser.END,
2902Range: 10 * time.Minute,
2903Step: 5 * time.Second,
2904EndPos: 27,
2905},
2906},
2907},
2908{
2909input: `floor(some_metric / (3 * 1024))`,
2910outputTest: true,
2911expected: &parser.Call{
2912Func: &parser.Function{
2913Name: "floor",
2914ArgTypes: []parser.ValueType{parser.ValueTypeVector},
2915ReturnType: parser.ValueTypeVector,
2916},
2917Args: parser.Expressions{
2918&parser.BinaryExpr{
2919Op: parser.DIV,
2920LHS: &parser.VectorSelector{
2921Name: "some_metric",
2922LabelMatchers: []*labels.Matcher{
2923parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2924},
2925PosRange: posrange.PositionRange{
2926Start: 6,
2927End: 17,
2928},
2929},
2930RHS: &parser.StepInvariantExpr{
2931Expr: &parser.ParenExpr{
2932Expr: &parser.BinaryExpr{
2933Op: parser.MUL,
2934LHS: &parser.NumberLiteral{
2935Val: 3,
2936PosRange: posrange.PositionRange{
2937Start: 21,
2938End: 22,
2939},
2940},
2941RHS: &parser.NumberLiteral{
2942Val: 1024,
2943PosRange: posrange.PositionRange{
2944Start: 25,
2945End: 29,
2946},
2947},
2948},
2949PosRange: posrange.PositionRange{
2950Start: 20,
2951End: 30,
2952},
2953},
2954},
2955},
2956},
2957PosRange: posrange.PositionRange{
2958Start: 0,
2959End: 31,
2960},
2961},
2962},
2963}
2964
2965for _, test := range testCases {
2966t.Run(test.input, func(t *testing.T) {
2967expr, err := parser.ParseExpr(test.input)
2968require.NoError(t, err)
2969expr = promql.PreprocessExpr(expr, startTime, endTime)
2970if test.outputTest {
2971require.Equal(t, test.input, expr.String(), "error on input '%s'", test.input)
2972}
2973require.Equal(t, test.expected, expr, "error on input '%s'", test.input)
2974})
2975}
2976}
2977
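// TestEngineOptsValidation checks that queries using the @ modifier or a
// negative offset are rejected unless the corresponding EngineOpts flags are enabled.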
2978func TestEngineOptsValidation(t *testing.T) {
2979cases := []struct {
2980opts promql.EngineOpts
2981query string
2982fail bool
2983expError error
2984}{
2985{
2986opts: promql.EngineOpts{EnableAtModifier: false},
2987query: "metric @ 100", fail: true, expError: promql.ErrValidationAtModifierDisabled,
2988}, {
2989opts: promql.EngineOpts{EnableAtModifier: false},
2990query: "rate(metric[1m] @ 100)", fail: true, expError: promql.ErrValidationAtModifierDisabled,
2991}, {
2992opts: promql.EngineOpts{EnableAtModifier: false},
2993query: "rate(metric[1h:1m] @ 100)", fail: true, expError: promql.ErrValidationAtModifierDisabled,
2994}, {
2995opts: promql.EngineOpts{EnableAtModifier: false},
2996query: "metric @ start()", fail: true, expError: promql.ErrValidationAtModifierDisabled,
2997}, {
2998opts: promql.EngineOpts{EnableAtModifier: false},
2999query: "rate(metric[1m] @ start())", fail: true, expError: promql.ErrValidationAtModifierDisabled,
3000}, {
3001opts: promql.EngineOpts{EnableAtModifier: false},
3002query: "rate(metric[1h:1m] @ start())", fail: true, expError: promql.ErrValidationAtModifierDisabled,
3003}, {
3004opts: promql.EngineOpts{EnableAtModifier: false},
3005query: "metric @ end()", fail: true, expError: promql.ErrValidationAtModifierDisabled,
3006}, {
3007opts: promql.EngineOpts{EnableAtModifier: false},
3008query: "rate(metric[1m] @ end())", fail: true, expError: promql.ErrValidationAtModifierDisabled,
3009}, {
3010opts: promql.EngineOpts{EnableAtModifier: false},
3011query: "rate(metric[1h:1m] @ end())", fail: true, expError: promql.ErrValidationAtModifierDisabled,
3012}, {
3013opts: promql.EngineOpts{EnableAtModifier: true},
3014query: "metric @ 100",
3015}, {
3016opts: promql.EngineOpts{EnableAtModifier: true},
3017query: "rate(metric[1m] @ start())",
3018}, {
3019opts: promql.EngineOpts{EnableAtModifier: true},
3020query: "rate(metric[1h:1m] @ end())",
3021}, {
3022opts: promql.EngineOpts{EnableNegativeOffset: false},
3023query: "metric offset -1s", fail: true, expError: promql.ErrValidationNegativeOffsetDisabled,
3024}, {
3025opts: promql.EngineOpts{EnableNegativeOffset: true},
3026query: "metric offset -1s",
3027}, {
3028opts: promql.EngineOpts{EnableAtModifier: true, EnableNegativeOffset: true},
3029query: "metric @ 100 offset -2m",
3030}, {
3031opts: promql.EngineOpts{EnableAtModifier: true, EnableNegativeOffset: true},
3032query: "metric offset -2m @ 100",
3033},
3034}
3035
3036for _, c := range cases {
3037eng := promql.NewEngine(c.opts)
3038_, err1 := eng.NewInstantQuery(context.Background(), nil, nil, c.query, time.Unix(10, 0))
3039_, err2 := eng.NewRangeQuery(context.Background(), nil, nil, c.query, time.Unix(0, 0), time.Unix(10, 0), time.Second)
3040if c.fail {
3041require.Equal(t, c.expError, err1)
3042require.Equal(t, c.expError, err2)
3043} else {
3044require.NoError(t, err1)
3045require.NoError(t, err2)
3046}
3047}
3048}
3049
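// TestRangeQuery runs range queries against freshly loaded fixtures and
// compares the resulting matrices against hand-written expectations.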
3050func TestRangeQuery(t *testing.T) {
3051cases := []struct {
3052Name string
3053Load string
3054Query string
3055Result parser.Value
3056Start time.Time
3057End time.Time
3058Interval time.Duration
3059}{
3060{
3061Name: "sum_over_time with all values",
3062Load: `load 30s
3063bar 0 1 10 100 1000`,
3064Query: "sum_over_time(bar[30s])",
3065Result: promql.Matrix{
3066promql.Series{
3067Floats: []promql.FPoint{{F: 0, T: 0}, {F: 11, T: 60000}, {F: 1100, T: 120000}},
3068Metric: labels.EmptyLabels(),
3069},
3070},
3071Start: time.Unix(0, 0),
3072End: time.Unix(120, 0),
3073Interval: 60 * time.Second,
3074},
3075{
3076Name: "sum_over_time with trailing values",
3077Load: `load 30s
3078bar 0 1 10 100 1000 0 0 0 0`,
3079Query: "sum_over_time(bar[30s])",
3080Result: promql.Matrix{
3081promql.Series{
3082Floats: []promql.FPoint{{F: 0, T: 0}, {F: 11, T: 60000}, {F: 1100, T: 120000}},
3083Metric: labels.EmptyLabels(),
3084},
3085},
3086Start: time.Unix(0, 0),
3087End: time.Unix(120, 0),
3088Interval: 60 * time.Second,
3089},
3090{
3091Name: "sum_over_time with all values long",
3092Load: `load 30s
3093bar 0 1 10 100 1000 10000 100000 1000000 10000000`,
3094Query: "sum_over_time(bar[30s])",
3095Result: promql.Matrix{
3096promql.Series{
3097Floats: []promql.FPoint{{F: 0, T: 0}, {F: 11, T: 60000}, {F: 1100, T: 120000}, {F: 110000, T: 180000}, {F: 11000000, T: 240000}},
3098Metric: labels.EmptyLabels(),
3099},
3100},
3101Start: time.Unix(0, 0),
3102End: time.Unix(240, 0),
3103Interval: 60 * time.Second,
3104},
3105{
3106Name: "sum_over_time with all values random",
3107Load: `load 30s
3108bar 5 17 42 2 7 905 51`,
3109Query: "sum_over_time(bar[30s])",
3110Result: promql.Matrix{
3111promql.Series{
3112Floats: []promql.FPoint{{F: 5, T: 0}, {F: 59, T: 60000}, {F: 9, T: 120000}, {F: 956, T: 180000}},
3113Metric: labels.EmptyLabels(),
3114},
3115},
3116Start: time.Unix(0, 0),
3117End: time.Unix(180, 0),
3118Interval: 60 * time.Second,
3119},
3120{
3121Name: "metric query",
3122Load: `load 30s
3123metric 1+1x4`,
3124Query: "metric",
3125Result: promql.Matrix{
3126promql.Series{
3127Floats: []promql.FPoint{{F: 1, T: 0}, {F: 3, T: 60000}, {F: 5, T: 120000}},
3128Metric: labels.FromStrings("__name__", "metric"),
3129},
3130},
3131Start: time.Unix(0, 0),
3132End: time.Unix(120, 0),
3133Interval: 1 * time.Minute,
3134},
3135{
3136Name: "metric query with trailing values",
3137Load: `load 30s
3138metric 1+1x8`,
3139Query: "metric",
3140Result: promql.Matrix{
3141promql.Series{
3142Floats: []promql.FPoint{{F: 1, T: 0}, {F: 3, T: 60000}, {F: 5, T: 120000}},
3143Metric: labels.FromStrings("__name__", "metric"),
3144},
3145},
3146Start: time.Unix(0, 0),
3147End: time.Unix(120, 0),
3148Interval: 1 * time.Minute,
3149},
3150{
3151Name: "short-circuit",
3152Load: `load 30s
3153foo{job="1"} 1+1x4
3154bar{job="2"} 1+1x4`,
3155Query: `foo > 2 or bar`,
3156Result: promql.Matrix{
3157promql.Series{
3158Floats: []promql.FPoint{{F: 1, T: 0}, {F: 3, T: 60000}, {F: 5, T: 120000}},
3159Metric: labels.FromStrings(
3160"__name__", "bar",
3161"job", "2",
3162),
3163},
3164promql.Series{
3165Floats: []promql.FPoint{{F: 3, T: 60000}, {F: 5, T: 120000}},
3166Metric: labels.FromStrings(
3167"__name__", "foo",
3168"job", "1",
3169),
3170},
3171},
3172Start: time.Unix(0, 0),
3173End: time.Unix(120, 0),
3174Interval: 1 * time.Minute,
3175},
3176{
3177Name: "drop-metric-name",
3178Load: `load 30s
3179requests{job="1", __address__="bar"} 100`,
3180Query: `requests * 2`,
3181Result: promql.Matrix{
3182promql.Series{
3183Floats: []promql.FPoint{{F: 200, T: 0}, {F: 200, T: 60000}, {F: 200, T: 120000}},
3184Metric: labels.FromStrings(
3185"__address__", "bar",
3186"job", "1",
3187),
3188},
3189},
3190Start: time.Unix(0, 0),
3191End: time.Unix(120, 0),
3192Interval: 1 * time.Minute,
3193},
3194}
3195for _, c := range cases {
3196t.Run(c.Name, func(t *testing.T) {
3197engine := newTestEngine()
3198storage := promqltest.LoadedStorage(t, c.Load)
3199t.Cleanup(func() { storage.Close() })
3200
3201qry, err := engine.NewRangeQuery(context.Background(), storage, nil, c.Query, c.Start, c.End, c.Interval)
3202require.NoError(t, err)
3203
3204res := qry.Exec(context.Background())
3205require.NoError(t, res.Err)
3206testutil.RequireEqual(t, c.Result, res.Value)
3207})
3208}
3209}
3210
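// TestNativeHistogram_Sum_Count_Add_AvgOperator checks sum(), count(), avg(),
// the + operator, and the corresponding *_over_time functions on native
// histograms, for both integer and float histogram samples, including the
// warning emitted when floats and histograms are mixed in an aggregation.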
3211func TestNativeHistogram_Sum_Count_Add_AvgOperator(t *testing.T) {
3212// TODO(codesome): Integrate histograms into the PromQL testing framework
3213// and write more tests there.
3214cases := []struct {
3215histograms []histogram.Histogram
3216expected histogram.FloatHistogram
3217expectedAvg histogram.FloatHistogram
3218}{
3219{
3220histograms: []histogram.Histogram{
3221{
3222CounterResetHint: histogram.GaugeType,
3223Schema: 0,
3224Count: 25,
3225Sum: 1234.5,
3226ZeroThreshold: 0.001,
3227ZeroCount: 4,
3228PositiveSpans: []histogram.Span{
3229{Offset: 0, Length: 2},
3230{Offset: 1, Length: 2},
3231},
3232PositiveBuckets: []int64{1, 1, -1, 0},
3233NegativeSpans: []histogram.Span{
3234{Offset: 0, Length: 2},
3235{Offset: 2, Length: 2},
3236},
3237NegativeBuckets: []int64{2, 2, -3, 8},
3238},
3239{
3240CounterResetHint: histogram.GaugeType,
3241Schema: 0,
3242Count: 41,
3243Sum: 2345.6,
3244ZeroThreshold: 0.001,
3245ZeroCount: 5,
3246PositiveSpans: []histogram.Span{
3247{Offset: 0, Length: 4},
3248{Offset: 0, Length: 0},
3249{Offset: 0, Length: 3},
3250},
3251PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},
3252NegativeSpans: []histogram.Span{
3253{Offset: 1, Length: 4},
3254{Offset: 2, Length: 0},
3255{Offset: 2, Length: 3},
3256},
3257NegativeBuckets: []int64{1, 3, -2, 5, -2, 0, -3},
3258},
3259{
3260CounterResetHint: histogram.GaugeType,
3261Schema: 0,
3262Count: 41,
3263Sum: 1111.1,
3264ZeroThreshold: 0.001,
3265ZeroCount: 5,
3266PositiveSpans: []histogram.Span{
3267{Offset: 0, Length: 4},
3268{Offset: 0, Length: 0},
3269{Offset: 0, Length: 3},
3270},
3271PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},
3272NegativeSpans: []histogram.Span{
3273{Offset: 1, Length: 4},
3274{Offset: 2, Length: 0},
3275{Offset: 2, Length: 3},
3276},
3277NegativeBuckets: []int64{1, 3, -2, 5, -2, 0, -3},
3278},
3279{
3280CounterResetHint: histogram.GaugeType,
3281Schema: 1, // All other fields are zero; this histogram only exists to bring the total count to 4 so avg() has nicer numbers.
3282},
3283},
3284expected: histogram.FloatHistogram{
3285CounterResetHint: histogram.GaugeType,
3286Schema: 0,
3287ZeroThreshold: 0.001,
3288ZeroCount: 14,
3289Count: 107,
3290Sum: 4691.2,
3291PositiveSpans: []histogram.Span{
3292{Offset: 0, Length: 7},
3293},
3294PositiveBuckets: []float64{3, 8, 2, 5, 3, 2, 2},
3295NegativeSpans: []histogram.Span{
3296{Offset: 0, Length: 6},
3297{Offset: 3, Length: 3},
3298},
3299NegativeBuckets: []float64{2, 6, 8, 4, 15, 9, 10, 10, 4},
3300},
3301expectedAvg: histogram.FloatHistogram{
3302CounterResetHint: histogram.GaugeType,
3303Schema: 0,
3304ZeroThreshold: 0.001,
3305ZeroCount: 3.5,
3306Count: 26.75,
3307Sum: 1172.8,
3308PositiveSpans: []histogram.Span{
3309{Offset: 0, Length: 7},
3310},
3311PositiveBuckets: []float64{0.75, 2, 0.5, 1.25, 0.75, 0.5, 0.5},
3312NegativeSpans: []histogram.Span{
3313{Offset: 0, Length: 6},
3314{Offset: 3, Length: 3},
3315},
3316NegativeBuckets: []float64{0.5, 1.5, 2, 1, 3.75, 2.25, 2.5, 2.5, 1},
3317},
3318},
3319}
3320
3321idx0 := int64(0)
3322for _, c := range cases {
3323for _, floatHisto := range []bool{true, false} {
3324t.Run(fmt.Sprintf("floatHistogram=%t %d", floatHisto, idx0), func(t *testing.T) {
3325storage := teststorage.New(t)
3326t.Cleanup(func() { storage.Close() })
3327
3328seriesName := "sparse_histogram_series"
3329seriesNameOverTime := "sparse_histogram_series_over_time"
3330
3331engine := newTestEngine()
3332
3333ts := idx0 * int64(10*time.Minute/time.Millisecond)
3334app := storage.Appender(context.Background())
3335_, err := app.Append(0, labels.FromStrings("__name__", "float_series", "idx", "0"), ts, 42)
3336require.NoError(t, err)
3337for idx1, h := range c.histograms {
3338lbls := labels.FromStrings("__name__", seriesName, "idx", strconv.Itoa(idx1))
3339// Since we mutate h later, we need to create a copy here.
3340var err error
3341if floatHisto {
3342_, err = app.AppendHistogram(0, lbls, ts, nil, h.Copy().ToFloat(nil))
3343} else {
3344_, err = app.AppendHistogram(0, lbls, ts, h.Copy(), nil)
3345}
3346require.NoError(t, err)
3347
3348lbls = labels.FromStrings("__name__", seriesNameOverTime)
3349newTs := ts + int64(idx1)*int64(time.Minute/time.Millisecond)
3350// Since we mutate h later, we need to create a copy here.
3351if floatHisto {
3352_, err = app.AppendHistogram(0, lbls, newTs, nil, h.Copy().ToFloat(nil))
3353} else {
3354_, err = app.AppendHistogram(0, lbls, newTs, h.Copy(), nil)
3355}
3356require.NoError(t, err)
3357}
3358require.NoError(t, app.Commit())
3359
3360queryAndCheck := func(queryString string, ts int64, exp promql.Vector) {
3361qry, err := engine.NewInstantQuery(context.Background(), storage, nil, queryString, timestamp.Time(ts))
3362require.NoError(t, err)
3363
3364res := qry.Exec(context.Background())
3365require.NoError(t, res.Err)
3366require.Empty(t, res.Warnings)
3367
3368vector, err := res.Vector()
3369require.NoError(t, err)
3370
3371testutil.RequireEqual(t, exp, vector)
3372}
3373queryAndCheckAnnotations := func(queryString string, ts int64, expWarnings annotations.Annotations) {
3374qry, err := engine.NewInstantQuery(context.Background(), storage, nil, queryString, timestamp.Time(ts))
3375require.NoError(t, err)
3376
3377res := qry.Exec(context.Background())
3378require.NoError(t, res.Err)
3379require.Equal(t, expWarnings, res.Warnings)
3380}
3381
3382// sum().
3383queryString := fmt.Sprintf("sum(%s)", seriesName)
3384queryAndCheck(queryString, ts, []promql.Sample{{T: ts, H: &c.expected, Metric: labels.EmptyLabels()}})
3385
3386queryString = `sum({idx="0"})`
3387var annos annotations.Annotations
3388annos.Add(annotations.NewMixedFloatsHistogramsAggWarning(posrange.PositionRange{Start: 4, End: 13}))
3389queryAndCheckAnnotations(queryString, ts, annos)
3390
3391// + operator.
3392queryString = fmt.Sprintf(`%s{idx="0"}`, seriesName)
3393for idx := 1; idx < len(c.histograms); idx++ {
3394queryString += fmt.Sprintf(` + ignoring(idx) %s{idx="%d"}`, seriesName, idx)
3395}
3396queryAndCheck(queryString, ts, []promql.Sample{{T: ts, H: &c.expected, Metric: labels.EmptyLabels()}})
3397
3398// count().
3399queryString = fmt.Sprintf("count(%s)", seriesName)
3400queryAndCheck(queryString, ts, []promql.Sample{{T: ts, F: 4, Metric: labels.EmptyLabels()}})
3401
3402// avg().
3403queryString = fmt.Sprintf("avg(%s)", seriesName)
3404queryAndCheck(queryString, ts, []promql.Sample{{T: ts, H: &c.expectedAvg, Metric: labels.EmptyLabels()}})
3405
3406offset := int64(len(c.histograms) - 1)
3407newTs := ts + offset*int64(time.Minute/time.Millisecond)
3408
3409// sum_over_time().
3410queryString = fmt.Sprintf("sum_over_time(%s[%dm:1m])", seriesNameOverTime, offset)
3411queryAndCheck(queryString, newTs, []promql.Sample{{T: newTs, H: &c.expected, Metric: labels.EmptyLabels()}})
3412
3413// avg_over_time().
3414queryString = fmt.Sprintf("avg_over_time(%s[%dm:1m])", seriesNameOverTime, offset)
3415queryAndCheck(queryString, newTs, []promql.Sample{{T: newTs, H: &c.expectedAvg, Metric: labels.EmptyLabels()}})
3416})
3417idx0++
3418}
3419}
3420}
3421
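// TestNativeHistogram_SubOperator checks the - operator on native histograms,
// including operands with different schemas.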
3422func TestNativeHistogram_SubOperator(t *testing.T) {
3423// TODO(codesome): Integrate histograms into the PromQL testing framework
3424// and write more tests there.
3425cases := []struct {
3426histograms []histogram.Histogram
3427expected histogram.FloatHistogram
3428}{
3429{
3430histograms: []histogram.Histogram{
3431{
3432Schema: 0,
3433Count: 41,
3434Sum: 2345.6,
3435ZeroThreshold: 0.001,
3436ZeroCount: 5,
3437PositiveSpans: []histogram.Span{
3438{Offset: 0, Length: 4},
3439{Offset: 0, Length: 0},
3440{Offset: 0, Length: 3},
3441},
3442PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},
3443NegativeSpans: []histogram.Span{
3444{Offset: 1, Length: 4},
3445{Offset: 2, Length: 0},
3446{Offset: 2, Length: 3},
3447},
3448NegativeBuckets: []int64{1, 3, -2, 5, -2, 0, -3},
3449},
3450{
3451Schema: 0,
3452Count: 11,
3453Sum: 1234.5,
3454ZeroThreshold: 0.001,
3455ZeroCount: 3,
3456PositiveSpans: []histogram.Span{
3457{Offset: 1, Length: 2},
3458},
3459PositiveBuckets: []int64{2, -1},
3460NegativeSpans: []histogram.Span{
3461{Offset: 2, Length: 2},
3462},
3463NegativeBuckets: []int64{3, -1},
3464},
3465},
3466expected: histogram.FloatHistogram{
3467Schema: 0,
3468Count: 30,
3469Sum: 1111.1,
3470ZeroThreshold: 0.001,
3471ZeroCount: 2,
3472PositiveSpans: []histogram.Span{
3473{Offset: 0, Length: 2},
3474{Offset: 1, Length: 4},
3475},
3476PositiveBuckets: []float64{1, 1, 2, 1, 1, 1},
3477NegativeSpans: []histogram.Span{
3478{Offset: 1, Length: 2},
3479{Offset: 1, Length: 1},
3480{Offset: 4, Length: 3},
3481},
3482NegativeBuckets: []float64{1, 1, 7, 5, 5, 2},
3483},
3484},
3485{
3486histograms: []histogram.Histogram{
3487{
3488Schema: 0,
3489Count: 41,
3490Sum: 2345.6,
3491ZeroThreshold: 0.001,
3492ZeroCount: 5,
3493PositiveSpans: []histogram.Span{
3494{Offset: 0, Length: 4},
3495{Offset: 0, Length: 0},
3496{Offset: 0, Length: 3},
3497},
3498PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},
3499NegativeSpans: []histogram.Span{
3500{Offset: 1, Length: 4},
3501{Offset: 2, Length: 0},
3502{Offset: 2, Length: 3},
3503},
3504NegativeBuckets: []int64{1, 3, -2, 5, -2, 0, -3},
3505},
3506{
3507Schema: 1,
3508Count: 11,
3509Sum: 1234.5,
3510ZeroThreshold: 0.001,
3511ZeroCount: 3,
3512PositiveSpans: []histogram.Span{
3513{Offset: 1, Length: 2},
3514},
3515PositiveBuckets: []int64{2, -1},
3516NegativeSpans: []histogram.Span{
3517{Offset: 2, Length: 2},
3518},
3519NegativeBuckets: []int64{3, -1},
3520},
3521},
3522expected: histogram.FloatHistogram{
3523Schema: 0,
3524Count: 30,
3525Sum: 1111.1,
3526ZeroThreshold: 0.001,
3527ZeroCount: 2,
3528PositiveSpans: []histogram.Span{
3529{Offset: 0, Length: 1},
3530{Offset: 1, Length: 5},
3531},
3532PositiveBuckets: []float64{1, 1, 2, 1, 1, 1},
3533NegativeSpans: []histogram.Span{
3534{Offset: 1, Length: 4},
3535{Offset: 4, Length: 3},
3536},
3537NegativeBuckets: []float64{-2, 2, 2, 7, 5, 5, 2},
3538},
3539},
3540{
3541histograms: []histogram.Histogram{
3542{
3543Schema: 1,
3544Count: 11,
3545Sum: 1234.5,
3546ZeroThreshold: 0.001,
3547ZeroCount: 3,
3548PositiveSpans: []histogram.Span{
3549{Offset: 1, Length: 2},
3550},
3551PositiveBuckets: []int64{2, -1},
3552NegativeSpans: []histogram.Span{
3553{Offset: 2, Length: 2},
3554},
3555NegativeBuckets: []int64{3, -1},
3556},
3557{
3558Schema: 0,
3559Count: 41,
3560Sum: 2345.6,
3561ZeroThreshold: 0.001,
3562ZeroCount: 5,
3563PositiveSpans: []histogram.Span{
3564{Offset: 0, Length: 4},
3565{Offset: 0, Length: 0},
3566{Offset: 0, Length: 3},
3567},
3568PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},
3569NegativeSpans: []histogram.Span{
3570{Offset: 1, Length: 4},
3571{Offset: 2, Length: 0},
3572{Offset: 2, Length: 3},
3573},
3574NegativeBuckets: []int64{1, 3, -2, 5, -2, 0, -3},
3575},
3576},
3577expected: histogram.FloatHistogram{
3578Schema: 0,
3579Count: -30,
3580Sum: -1111.1,
3581ZeroThreshold: 0.001,
3582ZeroCount: -2,
3583PositiveSpans: []histogram.Span{
3584{Offset: 0, Length: 1},
3585{Offset: 1, Length: 5},
3586},
3587PositiveBuckets: []float64{-1, -1, -2, -1, -1, -1},
3588NegativeSpans: []histogram.Span{
3589{Offset: 1, Length: 4},
3590{Offset: 4, Length: 3},
3591},
3592NegativeBuckets: []float64{2, -2, -2, -7, -5, -5, -2},
3593},
3594},
3595}
3596
3597idx0 := int64(0)
3598for _, c := range cases {
3599for _, floatHisto := range []bool{true, false} {
3600t.Run(fmt.Sprintf("floatHistogram=%t %d", floatHisto, idx0), func(t *testing.T) {
3601engine := newTestEngine()
3602storage := teststorage.New(t)
3603t.Cleanup(func() { storage.Close() })
3604
3605seriesName := "sparse_histogram_series"
3606
3607ts := idx0 * int64(10*time.Minute/time.Millisecond)
3608app := storage.Appender(context.Background())
3609for idx1, h := range c.histograms {
3610lbls := labels.FromStrings("__name__", seriesName, "idx", strconv.Itoa(idx1))
3611// Since we mutate h later, we need to create a copy here.
3612var err error
3613if floatHisto {
3614_, err = app.AppendHistogram(0, lbls, ts, nil, h.Copy().ToFloat(nil))
3615} else {
3616_, err = app.AppendHistogram(0, lbls, ts, h.Copy(), nil)
3617}
3618require.NoError(t, err)
3619}
3620require.NoError(t, app.Commit())
3621
3622queryAndCheck := func(queryString string, exp promql.Vector) {
3623qry, err := engine.NewInstantQuery(context.Background(), storage, nil, queryString, timestamp.Time(ts))
3624require.NoError(t, err)
3625
3626res := qry.Exec(context.Background())
3627require.NoError(t, res.Err)
3628
3629vector, err := res.Vector()
3630require.NoError(t, err)
3631
3632if len(vector) == len(exp) {
3633for i, e := range exp {
3634got := vector[i].H
3635if got != e.H {
3636// Error messages are better if we compare structs, not pointers.
3637require.Equal(t, *e.H, *got)
3638}
3639}
3640}
3641
3642testutil.RequireEqual(t, exp, vector)
3643}
3644
3645// - operator.
3646queryString := fmt.Sprintf(`%s{idx="0"}`, seriesName)
3647for idx := 1; idx < len(c.histograms); idx++ {
3648queryString += fmt.Sprintf(` - ignoring(idx) %s{idx="%d"}`, seriesName, idx)
3649}
3650queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expected, Metric: labels.EmptyLabels()}})
3651})
3652}
3653idx0++
3654}
3655}
3656
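// TestNativeHistogram_MulDivOperator checks multiplication and division of a
// native histogram by a scalar literal or a float sample, including the
// scalar-zero case.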
3657func TestNativeHistogram_MulDivOperator(t *testing.T) {
3658// TODO(codesome): Integrate histograms into the PromQL testing framework
3659// and write more tests there.
3660originalHistogram := histogram.Histogram{
3661Schema: 0,
3662Count: 21,
3663Sum: 33,
3664ZeroThreshold: 0.001,
3665ZeroCount: 3,
3666PositiveSpans: []histogram.Span{
3667{Offset: 0, Length: 3},
3668},
3669PositiveBuckets: []int64{3, 0, 0},
3670NegativeSpans: []histogram.Span{
3671{Offset: 0, Length: 3},
3672},
3673NegativeBuckets: []int64{3, 0, 0},
3674}
3675
3676cases := []struct {
3677scalar float64
3678histogram histogram.Histogram
3679expectedMul histogram.FloatHistogram
3680expectedDiv histogram.FloatHistogram
3681}{
3682{
3683scalar: 3,
3684histogram: originalHistogram,
3685expectedMul: histogram.FloatHistogram{
3686Schema: 0,
3687Count: 63,
3688Sum: 99,
3689ZeroThreshold: 0.001,
3690ZeroCount: 9,
3691PositiveSpans: []histogram.Span{
3692{Offset: 0, Length: 3},
3693},
3694PositiveBuckets: []float64{9, 9, 9},
3695NegativeSpans: []histogram.Span{
3696{Offset: 0, Length: 3},
3697},
3698NegativeBuckets: []float64{9, 9, 9},
3699},
3700expectedDiv: histogram.FloatHistogram{
3701Schema: 0,
3702Count: 7,
3703Sum: 11,
3704ZeroThreshold: 0.001,
3705ZeroCount: 1,
3706PositiveSpans: []histogram.Span{
3707{Offset: 0, Length: 3},
3708},
3709PositiveBuckets: []float64{1, 1, 1},
3710NegativeSpans: []histogram.Span{
3711{Offset: 0, Length: 3},
3712},
3713NegativeBuckets: []float64{1, 1, 1},
3714},
3715},
3716{
3717scalar: 0,
3718histogram: originalHistogram,
3719expectedMul: histogram.FloatHistogram{
3720Schema: 0,
3721Count: 0,
3722Sum: 0,
3723ZeroThreshold: 0.001,
3724ZeroCount: 0,
3725PositiveSpans: []histogram.Span{
3726{Offset: 0, Length: 3},
3727},
3728PositiveBuckets: []float64{0, 0, 0},
3729NegativeSpans: []histogram.Span{
3730{Offset: 0, Length: 3},
3731},
3732NegativeBuckets: []float64{0, 0, 0},
3733},
3734expectedDiv: histogram.FloatHistogram{
3735Schema: 0,
3736Count: math.Inf(1),
3737Sum: math.Inf(1),
3738ZeroThreshold: 0.001,
3739ZeroCount: math.Inf(1),
3740PositiveSpans: []histogram.Span{
3741{Offset: 0, Length: 3},
3742},
3743PositiveBuckets: []float64{math.Inf(1), math.Inf(1), math.Inf(1)},
3744NegativeSpans: []histogram.Span{
3745{Offset: 0, Length: 3},
3746},
3747NegativeBuckets: []float64{math.Inf(1), math.Inf(1), math.Inf(1)},
3748},
3749},
3750}
3751
3752idx0 := int64(0)
3753for _, c := range cases {
3754for _, floatHisto := range []bool{true, false} {
3755t.Run(fmt.Sprintf("floatHistogram=%t %d", floatHisto, idx0), func(t *testing.T) {
3756storage := teststorage.New(t)
3757t.Cleanup(func() { storage.Close() })
3758
3759seriesName := "sparse_histogram_series"
3760floatSeriesName := "float_series"
3761
3762engine := newTestEngine()
3763
3764ts := idx0 * int64(10*time.Minute/time.Millisecond)
3765app := storage.Appender(context.Background())
3766h := c.histogram
3767lbls := labels.FromStrings("__name__", seriesName)
3768// Since we mutate h later, we need to create a copy here.
3769var err error
3770if floatHisto {
3771_, err = app.AppendHistogram(0, lbls, ts, nil, h.Copy().ToFloat(nil))
3772} else {
3773_, err = app.AppendHistogram(0, lbls, ts, h.Copy(), nil)
3774}
3775require.NoError(t, err)
3776_, err = app.Append(0, labels.FromStrings("__name__", floatSeriesName), ts, c.scalar)
3777require.NoError(t, err)
3778require.NoError(t, app.Commit())
3779
3780queryAndCheck := func(queryString string, exp promql.Vector) {
3781qry, err := engine.NewInstantQuery(context.Background(), storage, nil, queryString, timestamp.Time(ts))
3782require.NoError(t, err)
3783
3784res := qry.Exec(context.Background())
3785require.NoError(t, res.Err)
3786
3787vector, err := res.Vector()
3788require.NoError(t, err)
3789
3790testutil.RequireEqual(t, exp, vector)
3791}
3792
3793// histogram * scalar.
3794queryString := fmt.Sprintf(`%s * %f`, seriesName, c.scalar)
3795queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedMul, Metric: labels.EmptyLabels()}})
3796
3797// scalar * histogram.
3798queryString = fmt.Sprintf(`%f * %s`, c.scalar, seriesName)
3799queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedMul, Metric: labels.EmptyLabels()}})
3800
3801// histogram * float.
3802queryString = fmt.Sprintf(`%s * %s`, seriesName, floatSeriesName)
3803queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedMul, Metric: labels.EmptyLabels()}})
3804
3805// float * histogram.
3806queryString = fmt.Sprintf(`%s * %s`, floatSeriesName, seriesName)
3807queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedMul, Metric: labels.EmptyLabels()}})
3808
3809// histogram / scalar.
3810queryString = fmt.Sprintf(`%s / %f`, seriesName, c.scalar)
3811queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedDiv, Metric: labels.EmptyLabels()}})
3812
3813// histogram / float.
3814queryString = fmt.Sprintf(`%s / %s`, seriesName, floatSeriesName)
3815queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedDiv, Metric: labels.EmptyLabels()}})
3816})
3817idx0++
3818}
3819}
3820}
3821
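// TestQueryLookbackDelta checks that samples are only returned within the
// applicable lookback delta, and that a per-query lookback overrides the
// engine default.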
3822func TestQueryLookbackDelta(t *testing.T) {
3823var (
3824load = `load 5m
3825metric 0 1 2
3826`
3827query = "metric"
3828lastDatapointTs = time.Unix(600, 0)
3829)
3830
3831cases := []struct {
3832name string
3833ts time.Time
3834engineLookback, queryLookback time.Duration
3835expectSamples bool
3836}{
3837{
3838name: "default lookback delta",
3839ts: lastDatapointTs.Add(defaultLookbackDelta),
3840expectSamples: true,
3841},
3842{
3843name: "outside default lookback delta",
3844ts: lastDatapointTs.Add(defaultLookbackDelta + time.Millisecond),
3845expectSamples: false,
3846},
3847{
3848name: "custom engine lookback delta",
3849ts: lastDatapointTs.Add(10 * time.Minute),
3850engineLookback: 10 * time.Minute,
3851expectSamples: true,
3852},
3853{
3854name: "outside custom engine lookback delta",
3855ts: lastDatapointTs.Add(10*time.Minute + time.Millisecond),
3856engineLookback: 10 * time.Minute,
3857expectSamples: false,
3858},
3859{
3860name: "custom query lookback delta",
3861ts: lastDatapointTs.Add(20 * time.Minute),
3862engineLookback: 10 * time.Minute,
3863queryLookback: 20 * time.Minute,
3864expectSamples: true,
3865},
3866{
3867name: "outside custom query lookback delta",
3868ts: lastDatapointTs.Add(20*time.Minute + time.Millisecond),
3869engineLookback: 10 * time.Minute,
3870queryLookback: 20 * time.Minute,
3871expectSamples: false,
3872},
3873{
3874name: "negative custom query lookback delta",
3875ts: lastDatapointTs.Add(20 * time.Minute),
3876engineLookback: -10 * time.Minute,
3877queryLookback: 20 * time.Minute,
3878expectSamples: true,
3879},
3880}
3881
3882for _, c := range cases {
3883c := c
3884t.Run(c.name, func(t *testing.T) {
3885engine := promqltest.NewTestEngine(false, c.engineLookback, promqltest.DefaultMaxSamplesPerQuery)
3886storage := promqltest.LoadedStorage(t, load)
3887t.Cleanup(func() { storage.Close() })
3888
3889opts := promql.NewPrometheusQueryOpts(false, c.queryLookback)
3890qry, err := engine.NewInstantQuery(context.Background(), storage, opts, query, c.ts)
3891require.NoError(t, err)
3892
3893res := qry.Exec(context.Background())
3894require.NoError(t, res.Err)
3895vec, ok := res.Value.(promql.Vector)
3896require.True(t, ok)
3897if c.expectSamples {
3898require.NotEmpty(t, vec)
3899} else {
3900require.Empty(t, vec)
3901}
3902})
3903}
3904}
3905
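// makeInt64Pointer returns a pointer to a copy of val.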
3906func makeInt64Pointer(val int64) *int64 {
3907valp := new(int64)
3908*valp = val
3909return valp
3910}
3911