prometheus

Форк
0
/
engine_test.go 
3910 строк · 112.1 Кб
1
// Copyright 2016 The Prometheus Authors
2
// Licensed under the Apache License, Version 2.0 (the "License");
3
// you may not use this file except in compliance with the License.
4
// You may obtain a copy of the License at
5
//
6
//     http://www.apache.org/licenses/LICENSE-2.0
7
//
8
// Unless required by applicable law or agreed to in writing, software
9
// distributed under the License is distributed on an "AS IS" BASIS,
10
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
// See the License for the specific language governing permissions and
12
// limitations under the License.
13

14
package promql_test
15

16
import (
17
	"context"
18
	"errors"
19
	"fmt"
20
	"math"
21
	"os"
22
	"sort"
23
	"strconv"
24
	"testing"
25
	"time"
26

27
	"github.com/stretchr/testify/require"
28
	"go.uber.org/goleak"
29

30
	"github.com/prometheus/prometheus/model/histogram"
31
	"github.com/prometheus/prometheus/model/labels"
32
	"github.com/prometheus/prometheus/model/timestamp"
33
	"github.com/prometheus/prometheus/promql"
34
	"github.com/prometheus/prometheus/promql/parser"
35
	"github.com/prometheus/prometheus/promql/parser/posrange"
36
	"github.com/prometheus/prometheus/promql/promqltest"
37
	"github.com/prometheus/prometheus/storage"
38
	"github.com/prometheus/prometheus/util/annotations"
39
	"github.com/prometheus/prometheus/util/stats"
40
	"github.com/prometheus/prometheus/util/teststorage"
41
	"github.com/prometheus/prometheus/util/testutil"
42
)
43

44
const (
45
	env                  = "query execution"
46
	defaultLookbackDelta = 5 * time.Minute
47
	defaultEpsilon       = 0.000001 // Relative error allowed for sample values.
48
)
49

50
func TestMain(m *testing.M) {
51
	goleak.VerifyTestMain(m)
52
}
53

54
func TestQueryConcurrency(t *testing.T) {
55
	maxConcurrency := 10
56

57
	dir, err := os.MkdirTemp("", "test_concurrency")
58
	require.NoError(t, err)
59
	defer os.RemoveAll(dir)
60
	queryTracker := promql.NewActiveQueryTracker(dir, maxConcurrency, nil)
61
	t.Cleanup(queryTracker.Close)
62

63
	opts := promql.EngineOpts{
64
		Logger:             nil,
65
		Reg:                nil,
66
		MaxSamples:         10,
67
		Timeout:            100 * time.Second,
68
		ActiveQueryTracker: queryTracker,
69
	}
70

71
	engine := promql.NewEngine(opts)
72
	ctx, cancelCtx := context.WithCancel(context.Background())
73
	defer cancelCtx()
74

75
	block := make(chan struct{})
76
	processing := make(chan struct{})
77
	done := make(chan int)
78
	defer close(done)
79

80
	f := func(context.Context) error {
81
		select {
82
		case processing <- struct{}{}:
83
		case <-done:
84
		}
85

86
		select {
87
		case <-block:
88
		case <-done:
89
		}
90
		return nil
91
	}
92

93
	for i := 0; i < maxConcurrency; i++ {
94
		q := engine.NewTestQuery(f)
95
		go q.Exec(ctx)
96
		select {
97
		case <-processing:
98
			// Expected.
99
		case <-time.After(20 * time.Millisecond):
100
			require.Fail(t, "Query within concurrency threshold not being executed")
101
		}
102
	}
103

104
	q := engine.NewTestQuery(f)
105
	go q.Exec(ctx)
106

107
	select {
108
	case <-processing:
109
		require.Fail(t, "Query above concurrency threshold being executed")
110
	case <-time.After(20 * time.Millisecond):
111
		// Expected.
112
	}
113

114
	// Terminate a running query.
115
	block <- struct{}{}
116

117
	select {
118
	case <-processing:
119
		// Expected.
120
	case <-time.After(20 * time.Millisecond):
121
		require.Fail(t, "Query within concurrency threshold not being executed")
122
	}
123

124
	// Terminate remaining queries.
125
	for i := 0; i < maxConcurrency; i++ {
126
		block <- struct{}{}
127
	}
128
}
129

130
// contextDone returns an error if the context was canceled or timed out.
131
func contextDone(ctx context.Context, env string) error {
132
	if err := ctx.Err(); err != nil {
133
		switch {
134
		case errors.Is(err, context.Canceled):
135
			return promql.ErrQueryCanceled(env)
136
		case errors.Is(err, context.DeadlineExceeded):
137
			return promql.ErrQueryTimeout(env)
138
		default:
139
			return err
140
		}
141
	}
142
	return nil
143
}
144

145
func TestQueryTimeout(t *testing.T) {
146
	opts := promql.EngineOpts{
147
		Logger:     nil,
148
		Reg:        nil,
149
		MaxSamples: 10,
150
		Timeout:    5 * time.Millisecond,
151
	}
152
	engine := promql.NewEngine(opts)
153
	ctx, cancelCtx := context.WithCancel(context.Background())
154
	defer cancelCtx()
155

156
	query := engine.NewTestQuery(func(ctx context.Context) error {
157
		time.Sleep(100 * time.Millisecond)
158
		return contextDone(ctx, "test statement execution")
159
	})
160

161
	res := query.Exec(ctx)
162
	require.Error(t, res.Err, "expected timeout error but got none")
163

164
	var e promql.ErrQueryTimeout
165
	require.ErrorAs(t, res.Err, &e, "expected timeout error but got: %s", res.Err)
166
}
167

168
const errQueryCanceled = promql.ErrQueryCanceled("test statement execution")
169

170
func TestQueryCancel(t *testing.T) {
171
	opts := promql.EngineOpts{
172
		Logger:     nil,
173
		Reg:        nil,
174
		MaxSamples: 10,
175
		Timeout:    10 * time.Second,
176
	}
177
	engine := promql.NewEngine(opts)
178
	ctx, cancelCtx := context.WithCancel(context.Background())
179
	defer cancelCtx()
180

181
	// Cancel a running query before it completes.
182
	block := make(chan struct{})
183
	processing := make(chan struct{})
184

185
	query1 := engine.NewTestQuery(func(ctx context.Context) error {
186
		processing <- struct{}{}
187
		<-block
188
		return contextDone(ctx, "test statement execution")
189
	})
190

191
	var res *promql.Result
192

193
	go func() {
194
		res = query1.Exec(ctx)
195
		processing <- struct{}{}
196
	}()
197

198
	<-processing
199
	query1.Cancel()
200
	block <- struct{}{}
201
	<-processing
202

203
	require.Error(t, res.Err, "expected cancellation error for query1 but got none")
204
	require.Equal(t, errQueryCanceled, res.Err)
205

206
	// Canceling a query before starting it must have no effect.
207
	query2 := engine.NewTestQuery(func(ctx context.Context) error {
208
		return contextDone(ctx, "test statement execution")
209
	})
210

211
	query2.Cancel()
212
	res = query2.Exec(ctx)
213
	require.NoError(t, res.Err)
214
}
215

216
// errQuerier implements storage.Querier which always returns error.
217
type errQuerier struct {
218
	err error
219
}
220

221
func (q *errQuerier) Select(context.Context, bool, *storage.SelectHints, ...*labels.Matcher) storage.SeriesSet {
222
	return errSeriesSet{err: q.err}
223
}
224

225
func (*errQuerier) LabelValues(context.Context, string, ...*labels.Matcher) ([]string, annotations.Annotations, error) {
226
	return nil, nil, nil
227
}
228

229
func (*errQuerier) LabelNames(context.Context, ...*labels.Matcher) ([]string, annotations.Annotations, error) {
230
	return nil, nil, nil
231
}
232
func (*errQuerier) Close() error { return nil }
233

234
// errSeriesSet implements storage.SeriesSet which always returns error.
235
type errSeriesSet struct {
236
	err error
237
}
238

239
func (errSeriesSet) Next() bool                          { return false }
240
func (errSeriesSet) At() storage.Series                  { return nil }
241
func (e errSeriesSet) Err() error                        { return e.err }
242
func (e errSeriesSet) Warnings() annotations.Annotations { return nil }
243

244
func TestQueryError(t *testing.T) {
245
	opts := promql.EngineOpts{
246
		Logger:     nil,
247
		Reg:        nil,
248
		MaxSamples: 10,
249
		Timeout:    10 * time.Second,
250
	}
251
	engine := promql.NewEngine(opts)
252
	errStorage := promql.ErrStorage{errors.New("storage error")}
253
	queryable := storage.QueryableFunc(func(mint, maxt int64) (storage.Querier, error) {
254
		return &errQuerier{err: errStorage}, nil
255
	})
256
	ctx, cancelCtx := context.WithCancel(context.Background())
257
	defer cancelCtx()
258

259
	vectorQuery, err := engine.NewInstantQuery(ctx, queryable, nil, "foo", time.Unix(1, 0))
260
	require.NoError(t, err)
261

262
	res := vectorQuery.Exec(ctx)
263
	require.Error(t, res.Err, "expected error on failed select but got none")
264
	require.ErrorIs(t, res.Err, errStorage, "expected error doesn't match")
265

266
	matrixQuery, err := engine.NewInstantQuery(ctx, queryable, nil, "foo[1m]", time.Unix(1, 0))
267
	require.NoError(t, err)
268

269
	res = matrixQuery.Exec(ctx)
270
	require.Error(t, res.Err, "expected error on failed select but got none")
271
	require.ErrorIs(t, res.Err, errStorage, "expected error doesn't match")
272
}
273

274
type noopHintRecordingQueryable struct {
275
	hints []*storage.SelectHints
276
}
277

278
func (h *noopHintRecordingQueryable) Querier(int64, int64) (storage.Querier, error) {
279
	return &hintRecordingQuerier{Querier: &errQuerier{}, h: h}, nil
280
}
281

282
type hintRecordingQuerier struct {
283
	storage.Querier
284

285
	h *noopHintRecordingQueryable
286
}
287

288
func (h *hintRecordingQuerier) Select(ctx context.Context, sortSeries bool, hints *storage.SelectHints, matchers ...*labels.Matcher) storage.SeriesSet {
289
	h.h.hints = append(h.h.hints, hints)
290
	return h.Querier.Select(ctx, sortSeries, hints, matchers...)
291
}
292

293
func TestSelectHintsSetCorrectly(t *testing.T) {
294
	opts := promql.EngineOpts{
295
		Logger:           nil,
296
		Reg:              nil,
297
		MaxSamples:       10,
298
		Timeout:          10 * time.Second,
299
		LookbackDelta:    5 * time.Second,
300
		EnableAtModifier: true,
301
	}
302

303
	for _, tc := range []struct {
304
		query string
305

306
		// All times are in milliseconds.
307
		start int64
308
		end   int64
309

310
		// TODO(bwplotka): Add support for better hints when subquerying.
311
		expected []*storage.SelectHints
312
	}{
313
		{
314
			query: "foo", start: 10000,
315
			expected: []*storage.SelectHints{
316
				{Start: 5000, End: 10000},
317
			},
318
		}, {
319
			query: "foo @ 15", start: 10000,
320
			expected: []*storage.SelectHints{
321
				{Start: 10000, End: 15000},
322
			},
323
		}, {
324
			query: "foo @ 1", start: 10000,
325
			expected: []*storage.SelectHints{
326
				{Start: -4000, End: 1000},
327
			},
328
		}, {
329
			query: "foo[2m]", start: 200000,
330
			expected: []*storage.SelectHints{
331
				{Start: 80000, End: 200000, Range: 120000},
332
			},
333
		}, {
334
			query: "foo[2m] @ 180", start: 200000,
335
			expected: []*storage.SelectHints{
336
				{Start: 60000, End: 180000, Range: 120000},
337
			},
338
		}, {
339
			query: "foo[2m] @ 300", start: 200000,
340
			expected: []*storage.SelectHints{
341
				{Start: 180000, End: 300000, Range: 120000},
342
			},
343
		}, {
344
			query: "foo[2m] @ 60", start: 200000,
345
			expected: []*storage.SelectHints{
346
				{Start: -60000, End: 60000, Range: 120000},
347
			},
348
		}, {
349
			query: "foo[2m] offset 2m", start: 300000,
350
			expected: []*storage.SelectHints{
351
				{Start: 60000, End: 180000, Range: 120000},
352
			},
353
		}, {
354
			query: "foo[2m] @ 200 offset 2m", start: 300000,
355
			expected: []*storage.SelectHints{
356
				{Start: -40000, End: 80000, Range: 120000},
357
			},
358
		}, {
359
			query: "foo[2m:1s]", start: 300000,
360
			expected: []*storage.SelectHints{
361
				{Start: 175000, End: 300000, Step: 1000},
362
			},
363
		}, {
364
			query: "count_over_time(foo[2m:1s])", start: 300000,
365
			expected: []*storage.SelectHints{
366
				{Start: 175000, End: 300000, Func: "count_over_time", Step: 1000},
367
			},
368
		}, {
369
			query: "count_over_time(foo[2m:1s] @ 300)", start: 200000,
370
			expected: []*storage.SelectHints{
371
				{Start: 175000, End: 300000, Func: "count_over_time", Step: 1000},
372
			},
373
		}, {
374
			query: "count_over_time(foo[2m:1s] @ 200)", start: 200000,
375
			expected: []*storage.SelectHints{
376
				{Start: 75000, End: 200000, Func: "count_over_time", Step: 1000},
377
			},
378
		}, {
379
			query: "count_over_time(foo[2m:1s] @ 100)", start: 200000,
380
			expected: []*storage.SelectHints{
381
				{Start: -25000, End: 100000, Func: "count_over_time", Step: 1000},
382
			},
383
		}, {
384
			query: "count_over_time(foo[2m:1s] offset 10s)", start: 300000,
385
			expected: []*storage.SelectHints{
386
				{Start: 165000, End: 290000, Func: "count_over_time", Step: 1000},
387
			},
388
		}, {
389
			query: "count_over_time((foo offset 10s)[2m:1s] offset 10s)", start: 300000,
390
			expected: []*storage.SelectHints{
391
				{Start: 155000, End: 280000, Func: "count_over_time", Step: 1000},
392
			},
393
		}, {
394
			// When the @ is on the vector selector, the enclosing subquery parameters
395
			// don't affect the hint ranges.
396
			query: "count_over_time((foo @ 200 offset 10s)[2m:1s] offset 10s)", start: 300000,
397
			expected: []*storage.SelectHints{
398
				{Start: 185000, End: 190000, Func: "count_over_time", Step: 1000},
399
			},
400
		}, {
401
			// When the @ is on the vector selector, the enclosing subquery parameters
402
			// don't affect the hint ranges.
403
			query: "count_over_time((foo @ 200 offset 10s)[2m:1s] @ 100 offset 10s)", start: 300000,
404
			expected: []*storage.SelectHints{
405
				{Start: 185000, End: 190000, Func: "count_over_time", Step: 1000},
406
			},
407
		}, {
408
			query: "count_over_time((foo offset 10s)[2m:1s] @ 100 offset 10s)", start: 300000,
409
			expected: []*storage.SelectHints{
410
				{Start: -45000, End: 80000, Func: "count_over_time", Step: 1000},
411
			},
412
		}, {
413
			query: "foo", start: 10000, end: 20000,
414
			expected: []*storage.SelectHints{
415
				{Start: 5000, End: 20000, Step: 1000},
416
			},
417
		}, {
418
			query: "foo @ 15", start: 10000, end: 20000,
419
			expected: []*storage.SelectHints{
420
				{Start: 10000, End: 15000, Step: 1000},
421
			},
422
		}, {
423
			query: "foo @ 1", start: 10000, end: 20000,
424
			expected: []*storage.SelectHints{
425
				{Start: -4000, End: 1000, Step: 1000},
426
			},
427
		}, {
428
			query: "rate(foo[2m] @ 180)", start: 200000, end: 500000,
429
			expected: []*storage.SelectHints{
430
				{Start: 60000, End: 180000, Range: 120000, Func: "rate", Step: 1000},
431
			},
432
		}, {
433
			query: "rate(foo[2m] @ 300)", start: 200000, end: 500000,
434
			expected: []*storage.SelectHints{
435
				{Start: 180000, End: 300000, Range: 120000, Func: "rate", Step: 1000},
436
			},
437
		}, {
438
			query: "rate(foo[2m] @ 60)", start: 200000, end: 500000,
439
			expected: []*storage.SelectHints{
440
				{Start: -60000, End: 60000, Range: 120000, Func: "rate", Step: 1000},
441
			},
442
		}, {
443
			query: "rate(foo[2m])", start: 200000, end: 500000,
444
			expected: []*storage.SelectHints{
445
				{Start: 80000, End: 500000, Range: 120000, Func: "rate", Step: 1000},
446
			},
447
		}, {
448
			query: "rate(foo[2m] offset 2m)", start: 300000, end: 500000,
449
			expected: []*storage.SelectHints{
450
				{Start: 60000, End: 380000, Range: 120000, Func: "rate", Step: 1000},
451
			},
452
		}, {
453
			query: "rate(foo[2m:1s])", start: 300000, end: 500000,
454
			expected: []*storage.SelectHints{
455
				{Start: 175000, End: 500000, Func: "rate", Step: 1000},
456
			},
457
		}, {
458
			query: "count_over_time(foo[2m:1s])", start: 300000, end: 500000,
459
			expected: []*storage.SelectHints{
460
				{Start: 175000, End: 500000, Func: "count_over_time", Step: 1000},
461
			},
462
		}, {
463
			query: "count_over_time(foo[2m:1s] offset 10s)", start: 300000, end: 500000,
464
			expected: []*storage.SelectHints{
465
				{Start: 165000, End: 490000, Func: "count_over_time", Step: 1000},
466
			},
467
		}, {
468
			query: "count_over_time(foo[2m:1s] @ 300)", start: 200000, end: 500000,
469
			expected: []*storage.SelectHints{
470
				{Start: 175000, End: 300000, Func: "count_over_time", Step: 1000},
471
			},
472
		}, {
473
			query: "count_over_time(foo[2m:1s] @ 200)", start: 200000, end: 500000,
474
			expected: []*storage.SelectHints{
475
				{Start: 75000, End: 200000, Func: "count_over_time", Step: 1000},
476
			},
477
		}, {
478
			query: "count_over_time(foo[2m:1s] @ 100)", start: 200000, end: 500000,
479
			expected: []*storage.SelectHints{
480
				{Start: -25000, End: 100000, Func: "count_over_time", Step: 1000},
481
			},
482
		}, {
483
			query: "count_over_time((foo offset 10s)[2m:1s] offset 10s)", start: 300000, end: 500000,
484
			expected: []*storage.SelectHints{
485
				{Start: 155000, End: 480000, Func: "count_over_time", Step: 1000},
486
			},
487
		}, {
488
			// When the @ is on the vector selector, the enclosing subquery parameters
489
			// don't affect the hint ranges.
490
			query: "count_over_time((foo @ 200 offset 10s)[2m:1s] offset 10s)", start: 300000, end: 500000,
491
			expected: []*storage.SelectHints{
492
				{Start: 185000, End: 190000, Func: "count_over_time", Step: 1000},
493
			},
494
		}, {
495
			// When the @ is on the vector selector, the enclosing subquery parameters
496
			// don't affect the hint ranges.
497
			query: "count_over_time((foo @ 200 offset 10s)[2m:1s] @ 100 offset 10s)", start: 300000, end: 500000,
498
			expected: []*storage.SelectHints{
499
				{Start: 185000, End: 190000, Func: "count_over_time", Step: 1000},
500
			},
501
		}, {
502
			query: "count_over_time((foo offset 10s)[2m:1s] @ 100 offset 10s)", start: 300000, end: 500000,
503
			expected: []*storage.SelectHints{
504
				{Start: -45000, End: 80000, Func: "count_over_time", Step: 1000},
505
			},
506
		}, {
507
			query: "sum by (dim1) (foo)", start: 10000,
508
			expected: []*storage.SelectHints{
509
				{Start: 5000, End: 10000, Func: "sum", By: true, Grouping: []string{"dim1"}},
510
			},
511
		}, {
512
			query: "sum without (dim1) (foo)", start: 10000,
513
			expected: []*storage.SelectHints{
514
				{Start: 5000, End: 10000, Func: "sum", Grouping: []string{"dim1"}},
515
			},
516
		}, {
517
			query: "sum by (dim1) (avg_over_time(foo[1s]))", start: 10000,
518
			expected: []*storage.SelectHints{
519
				{Start: 9000, End: 10000, Func: "avg_over_time", Range: 1000},
520
			},
521
		}, {
522
			query: "sum by (dim1) (max by (dim2) (foo))", start: 10000,
523
			expected: []*storage.SelectHints{
524
				{Start: 5000, End: 10000, Func: "max", By: true, Grouping: []string{"dim2"}},
525
			},
526
		}, {
527
			query: "(max by (dim1) (foo))[5s:1s]", start: 10000,
528
			expected: []*storage.SelectHints{
529
				{Start: 0, End: 10000, Func: "max", By: true, Grouping: []string{"dim1"}, Step: 1000},
530
			},
531
		}, {
532
			query: "(sum(http_requests{group=~\"p.*\"})+max(http_requests{group=~\"c.*\"}))[20s:5s]", start: 120000,
533
			expected: []*storage.SelectHints{
534
				{Start: 95000, End: 120000, Func: "sum", By: true, Step: 5000},
535
				{Start: 95000, End: 120000, Func: "max", By: true, Step: 5000},
536
			},
537
		}, {
538
			query: "foo @ 50 + bar @ 250 + baz @ 900", start: 100000, end: 500000,
539
			expected: []*storage.SelectHints{
540
				{Start: 45000, End: 50000, Step: 1000},
541
				{Start: 245000, End: 250000, Step: 1000},
542
				{Start: 895000, End: 900000, Step: 1000},
543
			},
544
		}, {
545
			query: "foo @ 50 + bar + baz @ 900", start: 100000, end: 500000,
546
			expected: []*storage.SelectHints{
547
				{Start: 45000, End: 50000, Step: 1000},
548
				{Start: 95000, End: 500000, Step: 1000},
549
				{Start: 895000, End: 900000, Step: 1000},
550
			},
551
		}, {
552
			query: "rate(foo[2s] @ 50) + bar @ 250 + baz @ 900", start: 100000, end: 500000,
553
			expected: []*storage.SelectHints{
554
				{Start: 48000, End: 50000, Step: 1000, Func: "rate", Range: 2000},
555
				{Start: 245000, End: 250000, Step: 1000},
556
				{Start: 895000, End: 900000, Step: 1000},
557
			},
558
		}, {
559
			query: "rate(foo[2s:1s] @ 50) + bar + baz", start: 100000, end: 500000,
560
			expected: []*storage.SelectHints{
561
				{Start: 43000, End: 50000, Step: 1000, Func: "rate"},
562
				{Start: 95000, End: 500000, Step: 1000},
563
				{Start: 95000, End: 500000, Step: 1000},
564
			},
565
		}, {
566
			query: "rate(foo[2s:1s] @ 50) + bar + rate(baz[2m:1s] @ 900 offset 2m) ", start: 100000, end: 500000,
567
			expected: []*storage.SelectHints{
568
				{Start: 43000, End: 50000, Step: 1000, Func: "rate"},
569
				{Start: 95000, End: 500000, Step: 1000},
570
				{Start: 655000, End: 780000, Step: 1000, Func: "rate"},
571
			},
572
		}, { // Hints are based on the inner most subquery timestamp.
573
			query: `sum_over_time(sum_over_time(metric{job="1"}[100s])[100s:25s] @ 50)[3s:1s] @ 3000`, start: 100000,
574
			expected: []*storage.SelectHints{
575
				{Start: -150000, End: 50000, Range: 100000, Func: "sum_over_time", Step: 25000},
576
			},
577
		}, { // Hints are based on the inner most subquery timestamp.
578
			query: `sum_over_time(sum_over_time(metric{job="1"}[100s])[100s:25s] @ 3000)[3s:1s] @ 50`,
579
			expected: []*storage.SelectHints{
580
				{Start: 2800000, End: 3000000, Range: 100000, Func: "sum_over_time", Step: 25000},
581
			},
582
		},
583
	} {
584
		t.Run(tc.query, func(t *testing.T) {
585
			engine := promql.NewEngine(opts)
586
			hintsRecorder := &noopHintRecordingQueryable{}
587

588
			var (
589
				query promql.Query
590
				err   error
591
			)
592
			ctx := context.Background()
593

594
			if tc.end == 0 {
595
				query, err = engine.NewInstantQuery(ctx, hintsRecorder, nil, tc.query, timestamp.Time(tc.start))
596
			} else {
597
				query, err = engine.NewRangeQuery(ctx, hintsRecorder, nil, tc.query, timestamp.Time(tc.start), timestamp.Time(tc.end), time.Second)
598
			}
599
			require.NoError(t, err)
600

601
			res := query.Exec(context.Background())
602
			require.NoError(t, res.Err)
603

604
			require.Equal(t, tc.expected, hintsRecorder.hints)
605
		})
606
	}
607
}
608

609
func TestEngineShutdown(t *testing.T) {
610
	opts := promql.EngineOpts{
611
		Logger:     nil,
612
		Reg:        nil,
613
		MaxSamples: 10,
614
		Timeout:    10 * time.Second,
615
	}
616
	engine := promql.NewEngine(opts)
617
	ctx, cancelCtx := context.WithCancel(context.Background())
618

619
	block := make(chan struct{})
620
	processing := make(chan struct{})
621

622
	// Shutdown engine on first handler execution. Should handler execution ever become
623
	// concurrent this test has to be adjusted accordingly.
624
	f := func(ctx context.Context) error {
625
		processing <- struct{}{}
626
		<-block
627
		return contextDone(ctx, "test statement execution")
628
	}
629
	query1 := engine.NewTestQuery(f)
630

631
	// Stopping the engine must cancel the base context. While executing queries is
632
	// still possible, their context is canceled from the beginning and execution should
633
	// terminate immediately.
634

635
	var res *promql.Result
636
	go func() {
637
		res = query1.Exec(ctx)
638
		processing <- struct{}{}
639
	}()
640

641
	<-processing
642
	cancelCtx()
643
	block <- struct{}{}
644
	<-processing
645

646
	require.Error(t, res.Err, "expected error on shutdown during query but got none")
647
	require.Equal(t, errQueryCanceled, res.Err)
648

649
	query2 := engine.NewTestQuery(func(context.Context) error {
650
		require.FailNow(t, "reached query execution unexpectedly")
651
		return nil
652
	})
653

654
	// The second query is started after the engine shut down. It must
655
	// be canceled immediately.
656
	res2 := query2.Exec(ctx)
657
	require.Error(t, res2.Err, "expected error on querying with canceled context but got none")
658

659
	var e promql.ErrQueryCanceled
660
	require.ErrorAs(t, res2.Err, &e, "expected cancellation error but got: %s", res2.Err)
661
}
662

663
func TestEngineEvalStmtTimestamps(t *testing.T) {
664
	storage := promqltest.LoadedStorage(t, `
665
load 10s
666
  metric 1 2
667
`)
668
	t.Cleanup(func() { storage.Close() })
669

670
	cases := []struct {
671
		Query       string
672
		Result      parser.Value
673
		Start       time.Time
674
		End         time.Time
675
		Interval    time.Duration
676
		ShouldError bool
677
	}{
678
		// Instant queries.
679
		{
680
			Query:  "1",
681
			Result: promql.Scalar{V: 1, T: 1000},
682
			Start:  time.Unix(1, 0),
683
		},
684
		{
685
			Query: "metric",
686
			Result: promql.Vector{
687
				promql.Sample{
688
					F:      1,
689
					T:      1000,
690
					Metric: labels.FromStrings("__name__", "metric"),
691
				},
692
			},
693
			Start: time.Unix(1, 0),
694
		},
695
		{
696
			Query: "metric[20s]",
697
			Result: promql.Matrix{
698
				promql.Series{
699
					Floats: []promql.FPoint{{F: 1, T: 0}, {F: 2, T: 10000}},
700
					Metric: labels.FromStrings("__name__", "metric"),
701
				},
702
			},
703
			Start: time.Unix(10, 0),
704
		},
705
		// Range queries.
706
		{
707
			Query: "1",
708
			Result: promql.Matrix{
709
				promql.Series{
710
					Floats: []promql.FPoint{{F: 1, T: 0}, {F: 1, T: 1000}, {F: 1, T: 2000}},
711
					Metric: labels.EmptyLabels(),
712
				},
713
			},
714
			Start:    time.Unix(0, 0),
715
			End:      time.Unix(2, 0),
716
			Interval: time.Second,
717
		},
718
		{
719
			Query: "metric",
720
			Result: promql.Matrix{
721
				promql.Series{
722
					Floats: []promql.FPoint{{F: 1, T: 0}, {F: 1, T: 1000}, {F: 1, T: 2000}},
723
					Metric: labels.FromStrings("__name__", "metric"),
724
				},
725
			},
726
			Start:    time.Unix(0, 0),
727
			End:      time.Unix(2, 0),
728
			Interval: time.Second,
729
		},
730
		{
731
			Query: "metric",
732
			Result: promql.Matrix{
733
				promql.Series{
734
					Floats: []promql.FPoint{{F: 1, T: 0}, {F: 1, T: 5000}, {F: 2, T: 10000}},
735
					Metric: labels.FromStrings("__name__", "metric"),
736
				},
737
			},
738
			Start:    time.Unix(0, 0),
739
			End:      time.Unix(10, 0),
740
			Interval: 5 * time.Second,
741
		},
742
		{
743
			Query:       `count_values("wrong label!", metric)`,
744
			ShouldError: true,
745
		},
746
	}
747

748
	for i, c := range cases {
749
		t.Run(fmt.Sprintf("%d query=%s", i, c.Query), func(t *testing.T) {
750
			var err error
751
			var qry promql.Query
752
			engine := newTestEngine()
753
			if c.Interval == 0 {
754
				qry, err = engine.NewInstantQuery(context.Background(), storage, nil, c.Query, c.Start)
755
			} else {
756
				qry, err = engine.NewRangeQuery(context.Background(), storage, nil, c.Query, c.Start, c.End, c.Interval)
757
			}
758
			require.NoError(t, err)
759

760
			res := qry.Exec(context.Background())
761
			if c.ShouldError {
762
				require.Error(t, res.Err, "expected error for the query %q", c.Query)
763
				return
764
			}
765

766
			require.NoError(t, res.Err)
767
			require.Equal(t, c.Result, res.Value, "query %q failed", c.Query)
768
		})
769
	}
770
}
771

772
// TestQueryStatistics verifies the per-query sample statistics (total samples,
// samples per step, and peak in-memory samples) reported by the engine for a
// range of instant and range queries, then confirms the reported peak is exact
// by re-running each query with MaxSamples set to peak-1 and expecting
// ErrTooManySamples.
func TestQueryStatistics(t *testing.T) {
	// Float series produce 1 sample per scrape; the histogram series counts as
	// 12 samples per scrape (its size) in the statistics below.
	storage := promqltest.LoadedStorage(t, `
load 10s
  metricWith1SampleEvery10Seconds 1+1x100
  metricWith3SampleEvery10Seconds{a="1",b="1"} 1+1x100
  metricWith3SampleEvery10Seconds{a="2",b="2"} 1+1x100
  metricWith3SampleEvery10Seconds{a="3",b="2"} 1+1x100
  metricWith1HistogramEvery10Seconds {{schema:1 count:5 sum:20 buckets:[1 2 1 1]}}+{{schema:1 count:10 sum:5 buckets:[1 2 3 4]}}x100
`)
	t.Cleanup(func() { storage.Close() })

	cases := []struct {
		Query               string
		SkipMaxCheck        bool
		TotalSamples        int64
		TotalSamplesPerStep stats.TotalSamplesPerStep
		PeakSamples         int
		Start               time.Time
		End                 time.Time
		Interval            time.Duration
	}{
		{
			Query:        `"literal string"`,
			SkipMaxCheck: true, // This can't fail from a max samples limit.
			Start:        time.Unix(21, 0),
			TotalSamples: 0,
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				21000: 0,
			},
		},
		{
			Query:        "1",
			Start:        time.Unix(21, 0),
			TotalSamples: 0,
			PeakSamples:  1,
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				21000: 0,
			},
		},
		{
			Query:        "metricWith1SampleEvery10Seconds",
			Start:        time.Unix(21, 0),
			PeakSamples:  1,
			TotalSamples: 1, // 1 sample / 10 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				21000: 1,
			},
		},
		{
			Query:        "metricWith1HistogramEvery10Seconds",
			Start:        time.Unix(21, 0),
			PeakSamples:  12,
			TotalSamples: 12, // 1 histogram sample of size 12 / 10 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				21000: 12,
			},
		},
		{
			// timestamp function has a special handling.
			Query:        "timestamp(metricWith1SampleEvery10Seconds)",
			Start:        time.Unix(21, 0),
			PeakSamples:  2,
			TotalSamples: 1, // 1 sample / 10 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				21000: 1,
			},
		},
		{
			Query:        "timestamp(metricWith1HistogramEvery10Seconds)",
			Start:        time.Unix(21, 0),
			PeakSamples:  2,
			TotalSamples: 1, // 1 float sample (because of timestamp) / 10 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				21000: 1,
			},
		},
		{
			Query:        "metricWith1SampleEvery10Seconds",
			Start:        time.Unix(22, 0),
			PeakSamples:  1,
			TotalSamples: 1, // 1 sample / 10 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				22000: 1, // Aligned to the step time, not the sample time.
			},
		},
		{
			Query:        "metricWith1SampleEvery10Seconds offset 10s",
			Start:        time.Unix(21, 0),
			PeakSamples:  1,
			TotalSamples: 1, // 1 sample / 10 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				21000: 1,
			},
		},
		{
			Query:        "metricWith1SampleEvery10Seconds @ 15",
			Start:        time.Unix(21, 0),
			PeakSamples:  1,
			TotalSamples: 1, // 1 sample / 10 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				21000: 1,
			},
		},
		{
			Query:        `metricWith3SampleEvery10Seconds{a="1"}`,
			Start:        time.Unix(21, 0),
			PeakSamples:  1,
			TotalSamples: 1, // 1 sample / 10 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				21000: 1,
			},
		},
		{
			Query:        `metricWith3SampleEvery10Seconds{a="1"} @ 19`,
			Start:        time.Unix(21, 0),
			PeakSamples:  1,
			TotalSamples: 1, // 1 sample / 10 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				21000: 1,
			},
		},
		{
			Query:        `metricWith3SampleEvery10Seconds{a="1"}[20s] @ 19`,
			Start:        time.Unix(21, 0),
			PeakSamples:  2,
			TotalSamples: 2, // (1 sample / 10 seconds) * 20s
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				21000: 2,
			},
		},
		{
			Query:        "metricWith3SampleEvery10Seconds",
			Start:        time.Unix(21, 0),
			PeakSamples:  3,
			TotalSamples: 3, // 3 samples / 10 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				21000: 3,
			},
		},
		{
			Query:        "metricWith1SampleEvery10Seconds[60s]",
			Start:        time.Unix(201, 0),
			PeakSamples:  6,
			TotalSamples: 6, // 1 sample / 10 seconds * 60 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 6,
			},
		},
		{
			Query:        "metricWith1HistogramEvery10Seconds[60s]",
			Start:        time.Unix(201, 0),
			PeakSamples:  72,
			TotalSamples: 72, // 1 histogram (size 12) / 10 seconds * 60 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 72,
			},
		},
		{
			Query:        "max_over_time(metricWith1SampleEvery10Seconds[59s])[20s:5s]",
			Start:        time.Unix(201, 0),
			PeakSamples:  10,
			TotalSamples: 24, // (1 sample / 10 seconds * 60 seconds) * 20/5 (using 59s so we always return 6 samples
			// as if we run a query on 00 looking back 60 seconds we will return 7 samples;
			// see next test).
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 24,
			},
		},
		{
			Query:        "max_over_time(metricWith1SampleEvery10Seconds[60s])[20s:5s]",
			Start:        time.Unix(201, 0),
			PeakSamples:  11,
			TotalSamples: 26, // (1 sample / 10 seconds * 60 seconds) * 4 + 2 as
			// max_over_time(metricWith1SampleEvery10Seconds[60s]) @ 190 and 200 will return 7 samples.
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 26,
			},
		},
		{
			Query:        "max_over_time(metricWith1HistogramEvery10Seconds[60s])[20s:5s]",
			Start:        time.Unix(201, 0),
			PeakSamples:  72,
			TotalSamples: 312, // (1 histogram (size 12) / 10 seconds * 60 seconds) * 4 + 2 * 12 as
			// max_over_time(metricWith1SampleEvery10Seconds[60s]) @ 190 and 200 will return 7 samples.
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 312,
			},
		},
		{
			Query:        "metricWith1SampleEvery10Seconds[60s] @ 30",
			Start:        time.Unix(201, 0),
			PeakSamples:  4,
			TotalSamples: 4, // @ modifier force the evaluation to at 30 seconds - So it brings 4 datapoints (0, 10, 20, 30 seconds) * 1 series
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 4,
			},
		},
		{
			Query:        "metricWith1HistogramEvery10Seconds[60s] @ 30",
			Start:        time.Unix(201, 0),
			PeakSamples:  48,
			TotalSamples: 48, // @ modifier force the evaluation to at 30 seconds - So it brings 4 datapoints (0, 10, 20, 30 seconds) * 1 series
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 48,
			},
		},
		{
			Query:        "sum(max_over_time(metricWith3SampleEvery10Seconds[60s] @ 30))",
			Start:        time.Unix(201, 0),
			PeakSamples:  7,
			TotalSamples: 12, // @ modifier force the evaluation to at 30 seconds - So it brings 4 datapoints (0, 10, 20, 30 seconds) * 3 series
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 12,
			},
		},
		{
			Query:        "sum by (b) (max_over_time(metricWith3SampleEvery10Seconds[60s] @ 30))",
			Start:        time.Unix(201, 0),
			PeakSamples:  7,
			TotalSamples: 12, // @ modifier force the evaluation to at 30 seconds - So it brings 4 datapoints (0, 10, 20, 30 seconds) * 3 series
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 12,
			},
		},
		{
			Query:        "metricWith1SampleEvery10Seconds[60s] offset 10s",
			Start:        time.Unix(201, 0),
			PeakSamples:  6,
			TotalSamples: 6, // 1 sample / 10 seconds * 60 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 6,
			},
		},
		{
			Query:        "metricWith3SampleEvery10Seconds[60s]",
			Start:        time.Unix(201, 0),
			PeakSamples:  18,
			TotalSamples: 18, // 3 sample / 10 seconds * 60 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 18,
			},
		},
		{
			Query:        "max_over_time(metricWith1SampleEvery10Seconds[60s])",
			Start:        time.Unix(201, 0),
			PeakSamples:  7,
			TotalSamples: 6, // 1 sample / 10 seconds * 60 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 6,
			},
		},
		{
			Query:        "absent_over_time(metricWith1SampleEvery10Seconds[60s])",
			Start:        time.Unix(201, 0),
			PeakSamples:  7,
			TotalSamples: 6, // 1 sample / 10 seconds * 60 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 6,
			},
		},
		{
			Query:        "max_over_time(metricWith3SampleEvery10Seconds[60s])",
			Start:        time.Unix(201, 0),
			PeakSamples:  9,
			TotalSamples: 18, // 3 sample / 10 seconds * 60 seconds
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 18,
			},
		},
		{
			Query:        "metricWith1SampleEvery10Seconds[60s:5s]",
			Start:        time.Unix(201, 0),
			PeakSamples:  12,
			TotalSamples: 12, // 1 sample per query * 12 queries (60/5)
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 12,
			},
		},
		{
			Query:        "metricWith1SampleEvery10Seconds[60s:5s] offset 10s",
			Start:        time.Unix(201, 0),
			PeakSamples:  12,
			TotalSamples: 12, // 1 sample per query * 12 queries (60/5)
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 12,
			},
		},
		{
			Query:        "max_over_time(metricWith3SampleEvery10Seconds[60s:5s])",
			Start:        time.Unix(201, 0),
			PeakSamples:  51,
			TotalSamples: 36, // 3 sample per query * 12 queries (60/5)
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 36,
			},
		},
		{
			Query:        "sum(max_over_time(metricWith3SampleEvery10Seconds[60s:5s])) + sum(max_over_time(metricWith3SampleEvery10Seconds[60s:5s]))",
			Start:        time.Unix(201, 0),
			PeakSamples:  52,
			TotalSamples: 72, // 2 * (3 sample per query * 12 queries (60/5))
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 72,
			},
		},
		{
			Query:        `metricWith3SampleEvery10Seconds{a="1"}`,
			Start:        time.Unix(201, 0),
			End:          time.Unix(220, 0),
			Interval:     5 * time.Second,
			PeakSamples:  4,
			TotalSamples: 4, // 1 sample per query * 4 steps
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 1,
				206000: 1,
				211000: 1,
				216000: 1,
			},
		},
		{
			Query:        `metricWith3SampleEvery10Seconds{a="1"}`,
			Start:        time.Unix(204, 0),
			End:          time.Unix(223, 0),
			Interval:     5 * time.Second,
			PeakSamples:  4,
			TotalSamples: 4, // 1 sample per query * 4 steps
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				204000: 1, // aligned to the step time, not the sample time
				209000: 1,
				214000: 1,
				219000: 1,
			},
		},
		{
			Query:        `metricWith1HistogramEvery10Seconds`,
			Start:        time.Unix(204, 0),
			End:          time.Unix(223, 0),
			Interval:     5 * time.Second,
			PeakSamples:  48,
			TotalSamples: 48, // 1 histogram (size 12) per query * 4 steps
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				204000: 12, // aligned to the step time, not the sample time
				209000: 12,
				214000: 12,
				219000: 12,
			},
		},
		{
			// timestamp function has a special handling
			Query:        "timestamp(metricWith1SampleEvery10Seconds)",
			Start:        time.Unix(201, 0),
			End:          time.Unix(220, 0),
			Interval:     5 * time.Second,
			PeakSamples:  5,
			TotalSamples: 4, // 1 sample per query * 4 steps
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 1,
				206000: 1,
				211000: 1,
				216000: 1,
			},
		},
		{
			// timestamp function has a special handling
			Query:        "timestamp(metricWith1HistogramEvery10Seconds)",
			Start:        time.Unix(201, 0),
			End:          time.Unix(220, 0),
			Interval:     5 * time.Second,
			PeakSamples:  5,
			TotalSamples: 4, // 1 sample per query * 4 steps
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 1,
				206000: 1,
				211000: 1,
				216000: 1,
			},
		},
		{
			Query:        `max_over_time(metricWith3SampleEvery10Seconds{a="1"}[10s])`,
			Start:        time.Unix(991, 0),
			End:          time.Unix(1021, 0),
			Interval:     10 * time.Second,
			PeakSamples:  2,
			TotalSamples: 2, // 1 sample per query * 2 steps with data
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				991000:  1,
				1001000: 1,
				1011000: 0,
				1021000: 0,
			},
		},
		{
			Query:        `metricWith3SampleEvery10Seconds{a="1"} offset 10s`,
			Start:        time.Unix(201, 0),
			End:          time.Unix(220, 0),
			Interval:     5 * time.Second,
			PeakSamples:  4,
			TotalSamples: 4, // 1 sample per query * 4 steps
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 1,
				206000: 1,
				211000: 1,
				216000: 1,
			},
		},
		{
			Query:        "max_over_time(metricWith3SampleEvery10Seconds[60s] @ 30)",
			Start:        time.Unix(201, 0),
			End:          time.Unix(220, 0),
			Interval:     5 * time.Second,
			PeakSamples:  12,
			TotalSamples: 48, // @ modifier force the evaluation timestamp at 30 seconds - So it brings 4 datapoints (0, 10, 20, 30 seconds) * 3 series * 4 steps
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 12,
				206000: 12,
				211000: 12,
				216000: 12,
			},
		},
		{
			Query:        `metricWith3SampleEvery10Seconds`,
			Start:        time.Unix(201, 0),
			End:          time.Unix(220, 0),
			PeakSamples:  12,
			Interval:     5 * time.Second,
			TotalSamples: 12, // 3 sample per query * 4 steps
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 3,
				206000: 3,
				211000: 3,
				216000: 3,
			},
		},
		{
			Query:        `max_over_time(metricWith3SampleEvery10Seconds[60s])`,
			Start:        time.Unix(201, 0),
			End:          time.Unix(220, 0),
			Interval:     5 * time.Second,
			PeakSamples:  18,
			TotalSamples: 72, // (3 sample / 10 seconds * 60 seconds) * 4 steps = 72
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 18,
				206000: 18,
				211000: 18,
				216000: 18,
			},
		},
		{
			Query:        "max_over_time(metricWith3SampleEvery10Seconds[60s:5s])",
			Start:        time.Unix(201, 0),
			End:          time.Unix(220, 0),
			Interval:     5 * time.Second,
			PeakSamples:  72,
			TotalSamples: 144, // 3 sample per query * 12 queries (60/5) * 4 steps
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 36,
				206000: 36,
				211000: 36,
				216000: 36,
			},
		},
		{
			Query:        "max_over_time(metricWith1SampleEvery10Seconds[60s:5s])",
			Start:        time.Unix(201, 0),
			End:          time.Unix(220, 0),
			Interval:     5 * time.Second,
			PeakSamples:  32,
			TotalSamples: 48, // 1 sample per query * 12 queries (60/5) * 4 steps
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 12,
				206000: 12,
				211000: 12,
				216000: 12,
			},
		},
		{
			Query:        "sum by (b) (max_over_time(metricWith1SampleEvery10Seconds[60s:5s]))",
			Start:        time.Unix(201, 0),
			End:          time.Unix(220, 0),
			Interval:     5 * time.Second,
			PeakSamples:  32,
			TotalSamples: 48, // 1 sample per query * 12 queries (60/5) * 4 steps
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 12,
				206000: 12,
				211000: 12,
				216000: 12,
			},
		},
		{
			Query:        "sum(max_over_time(metricWith3SampleEvery10Seconds[60s:5s])) + sum(max_over_time(metricWith3SampleEvery10Seconds[60s:5s]))",
			Start:        time.Unix(201, 0),
			End:          time.Unix(220, 0),
			Interval:     5 * time.Second,
			PeakSamples:  76,
			TotalSamples: 288, // 2 * (3 sample per query * 12 queries (60/5) * 4 steps)
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 72,
				206000: 72,
				211000: 72,
				216000: 72,
			},
		},
		{
			Query:        "sum(max_over_time(metricWith3SampleEvery10Seconds[60s:5s])) + sum(max_over_time(metricWith1SampleEvery10Seconds[60s:5s]))",
			Start:        time.Unix(201, 0),
			End:          time.Unix(220, 0),
			Interval:     5 * time.Second,
			PeakSamples:  72,
			TotalSamples: 192, // (1 sample per query * 12 queries (60/5) + 3 sample per query * 12 queries (60/5)) * 4 steps
			TotalSamplesPerStep: stats.TotalSamplesPerStep{
				201000: 48,
				206000: 48,
				211000: 48,
				216000: 48,
			},
		},
	}

	for _, c := range cases {
		t.Run(c.Query, func(t *testing.T) {
			opts := promql.NewPrometheusQueryOpts(true, 0)
			engine := promqltest.NewTestEngine(true, 0, promqltest.DefaultMaxSamplesPerQuery)

			// runQuery executes c.Query (instant when Interval is zero, range
			// otherwise), asserts the execution error matches expErr, and
			// returns the collected query statistics.
			runQuery := func(expErr error) *stats.Statistics {
				var err error
				var qry promql.Query
				if c.Interval == 0 {
					qry, err = engine.NewInstantQuery(context.Background(), storage, opts, c.Query, c.Start)
				} else {
					qry, err = engine.NewRangeQuery(context.Background(), storage, opts, c.Query, c.Start, c.End, c.Interval)
				}
				require.NoError(t, err)

				res := qry.Exec(context.Background())
				require.Equal(t, expErr, res.Err)

				return qry.Stats()
			}

			stats := runQuery(nil)
			require.Equal(t, c.TotalSamples, stats.Samples.TotalSamples, "Total samples mismatch")
			require.Equal(t, &c.TotalSamplesPerStep, stats.Samples.TotalSamplesPerStepMap(), "Total samples per time mismatch")
			require.Equal(t, c.PeakSamples, stats.Samples.PeakSamples, "Peak samples mismatch")

			// Check that the peak is correct by setting the max to one less.
			if c.SkipMaxCheck {
				return
			}
			engine = promqltest.NewTestEngine(true, 0, stats.Samples.PeakSamples-1)
			runQuery(promql.ErrTooManySamples(env))
		})
	}
}
1326

1327
// TestMaxQuerySamples checks the engine's MaxSamples limit: each case's
// MaxSamples is the exact peak number of samples the query holds in memory,
// so the query succeeds at that limit and fails with ErrTooManySamples at
// limit-1.
func TestMaxQuerySamples(t *testing.T) {
	storage := promqltest.LoadedStorage(t, `
load 10s
  metric 1+1x100
  bigmetric{a="1"} 1+1x100
  bigmetric{a="2"} 1+1x100
`)
	t.Cleanup(func() { storage.Close() })

	// These test cases should be touching the limit exactly (hence no exceeding).
	// Exceeding the limit will be tested by doing -1 to the MaxSamples.
	cases := []struct {
		Query      string
		MaxSamples int
		Start      time.Time
		End        time.Time
		Interval   time.Duration
	}{
		// Instant queries.
		{
			Query:      "1",
			MaxSamples: 1,
			Start:      time.Unix(1, 0),
		},
		{
			Query:      "metric",
			MaxSamples: 1,
			Start:      time.Unix(1, 0),
		},
		{
			Query:      "metric[20s]",
			MaxSamples: 2,
			Start:      time.Unix(10, 0),
		},
		{
			Query:      "rate(metric[20s])",
			MaxSamples: 3,
			Start:      time.Unix(10, 0),
		},
		{
			Query:      "metric[20s:5s]",
			MaxSamples: 3,
			Start:      time.Unix(10, 0),
		},
		{
			Query:      "metric[20s] @ 10",
			MaxSamples: 2,
			Start:      time.Unix(0, 0),
		},
		// Range queries.
		{
			Query:      "1",
			MaxSamples: 3,
			Start:      time.Unix(0, 0),
			End:        time.Unix(2, 0),
			Interval:   time.Second,
		},
		{
			Query:      "1",
			MaxSamples: 3,
			Start:      time.Unix(0, 0),
			End:        time.Unix(2, 0),
			Interval:   time.Second,
		},
		{
			Query:      "metric",
			MaxSamples: 3,
			Start:      time.Unix(0, 0),
			End:        time.Unix(2, 0),
			Interval:   time.Second,
		},
		{
			Query:      "metric",
			MaxSamples: 3,
			Start:      time.Unix(0, 0),
			End:        time.Unix(10, 0),
			Interval:   5 * time.Second,
		},
		{
			Query:      "rate(bigmetric[1s])",
			MaxSamples: 1,
			Start:      time.Unix(0, 0),
			End:        time.Unix(10, 0),
			Interval:   5 * time.Second,
		},
		{
			// Result is duplicated, so @ also produces 3 samples.
			Query:      "metric @ 10",
			MaxSamples: 3,
			Start:      time.Unix(0, 0),
			End:        time.Unix(10, 0),
			Interval:   5 * time.Second,
		},
		{
			// The peak samples in memory is during the first evaluation:
			//   - Subquery takes 22 samples, 11 for each bigmetric,
			//   - Result is calculated per series where the series samples is buffered, hence 11 more here.
			//   - The result of two series is added before the last series buffer is discarded, so 2 more here.
			//   Hence at peak it is 22 (subquery) + 11 (buffer of a series) + 2 (result from 2 series).
			// The subquery samples and the buffer is discarded before duplicating.
			Query:      `rate(bigmetric[10s:1s] @ 10)`,
			MaxSamples: 35,
			Start:      time.Unix(0, 0),
			End:        time.Unix(10, 0),
			Interval:   5 * time.Second,
		},
		{
			// Here the reasoning is same as above. But LHS and RHS are done one after another.
			// So while one of them takes 35 samples at peak, we need to hold the 2 sample
			// result of the other till then.
			Query:      `rate(bigmetric[10s:1s] @ 10) + rate(bigmetric[10s:1s] @ 30)`,
			MaxSamples: 37,
			Start:      time.Unix(0, 0),
			End:        time.Unix(10, 0),
			Interval:   5 * time.Second,
		},
		{
			// promql.Sample as above but with only 1 part as step invariant.
			// Here the peak is caused by the non-step invariant part as it touches more time range.
			// Hence at peak it is 2*21 (subquery from 0s to 20s)
			//                     + 11 (buffer of a series per evaluation)
			//                     + 6 (result from 2 series at 3 eval times).
			Query:      `rate(bigmetric[10s:1s]) + rate(bigmetric[10s:1s] @ 30)`,
			MaxSamples: 59,
			Start:      time.Unix(10, 0),
			End:        time.Unix(20, 0),
			Interval:   5 * time.Second,
		},
		{
			// Nested subquery.
			// We saw that innermost rate takes 35 samples which is still the peak
			// since the other two subqueries just duplicate the result.
			Query:      `rate(rate(bigmetric[10s:1s] @ 10)[100s:25s] @ 1000)[100s:20s] @ 2000`,
			MaxSamples: 35,
			Start:      time.Unix(10, 0),
		},
		{
			// Nested subquery.
			// Now the outmost subquery produces more samples than inner most rate.
			Query:      `rate(rate(bigmetric[10s:1s] @ 10)[100s:25s] @ 1000)[17s:1s] @ 2000`,
			MaxSamples: 36,
			Start:      time.Unix(10, 0),
		},
	}

	for _, c := range cases {
		t.Run(c.Query, func(t *testing.T) {
			engine := newTestEngine()
			// testFunc runs the query (instant when Interval is zero, range
			// otherwise) against the current engine and asserts both the
			// expected error and, on success, the recorded peak sample count.
			testFunc := func(expError error) {
				var err error
				var qry promql.Query
				if c.Interval == 0 {
					qry, err = engine.NewInstantQuery(context.Background(), storage, nil, c.Query, c.Start)
				} else {
					qry, err = engine.NewRangeQuery(context.Background(), storage, nil, c.Query, c.Start, c.End, c.Interval)
				}
				require.NoError(t, err)

				res := qry.Exec(context.Background())
				stats := qry.Stats()
				require.Equal(t, expError, res.Err)
				require.NotNil(t, stats)
				if expError == nil {
					require.Equal(t, c.MaxSamples, stats.Samples.PeakSamples, "peak samples mismatch for query %q", c.Query)
				}
			}

			// Within limit.
			engine = promqltest.NewTestEngine(false, 0, c.MaxSamples)
			testFunc(nil)

			// Exceeding limit.
			engine = promqltest.NewTestEngine(false, 0, c.MaxSamples-1)
			testFunc(promql.ErrTooManySamples(env))
		})
	}
}
1504

1505
// TestAtModifier exercises the @ (at) modifier on instant/range selectors and
// subqueries, including negative and millisecond timestamps and the start()/
// end() preprocessors, asserting exact result series per query.
func TestAtModifier(t *testing.T) {
	engine := newTestEngine()
	storage := promqltest.LoadedStorage(t, `
load 10s
  metric{job="1"} 0+1x1000
  metric{job="2"} 0+2x1000
  metric_topk{instance="1"} 0+1x1000
  metric_topk{instance="2"} 0+2x1000
  metric_topk{instance="3"} 1000-1x1000

load 1ms
  metric_ms 0+1x10000
`)
	t.Cleanup(func() { storage.Close() })

	lbls1 := labels.FromStrings("__name__", "metric", "job", "1")
	lbls2 := labels.FromStrings("__name__", "metric", "job", "2")
	lblstopk2 := labels.FromStrings("__name__", "metric_topk", "instance", "2")
	lblstopk3 := labels.FromStrings("__name__", "metric_topk", "instance", "3")
	lblsms := labels.FromStrings("__name__", "metric_ms")
	lblsneg := labels.FromStrings("__name__", "metric_neg")

	// Add some samples with negative timestamp.
	// metric_neg counts down to 1 at t=0: value at ts is -(ts/1000)+1.
	db := storage.DB
	app := db.Appender(context.Background())
	ref, err := app.Append(0, lblsneg, -1000000, 1000)
	require.NoError(t, err)
	for ts := int64(-1000000 + 1000); ts <= 0; ts += 1000 {
		// Reuse the series ref from the first append; labels may be empty then.
		_, err := app.Append(ref, labels.EmptyLabels(), ts, -float64(ts/1000)+1)
		require.NoError(t, err)
	}

	// To test the fix for https://github.com/prometheus/prometheus/issues/8433.
	_, err = app.Append(0, labels.FromStrings("__name__", "metric_timestamp"), 3600*1000, 1000)
	require.NoError(t, err)

	require.NoError(t, app.Commit())

	cases := []struct {
		query                string
		start, end, interval int64 // Time in seconds.
		result               parser.Value
	}{
		{ // Time of the result is the evaluation time.
			query: `metric_neg @ 0`,
			start: 100,
			result: promql.Vector{
				promql.Sample{F: 1, T: 100000, Metric: lblsneg},
			},
		}, {
			query: `metric_neg @ -200`,
			start: 100,
			result: promql.Vector{
				promql.Sample{F: 201, T: 100000, Metric: lblsneg},
			},
		}, {
			query: `metric{job="2"} @ 50`,
			start: -2, end: 2, interval: 1,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{{F: 10, T: -2000}, {F: 10, T: -1000}, {F: 10, T: 0}, {F: 10, T: 1000}, {F: 10, T: 2000}},
					Metric: lbls2,
				},
			},
		}, { // Timestamps for matrix selector does not depend on the evaluation time.
			query: "metric[20s] @ 300",
			start: 10,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{{F: 28, T: 280000}, {F: 29, T: 290000}, {F: 30, T: 300000}},
					Metric: lbls1,
				},
				promql.Series{
					Floats: []promql.FPoint{{F: 56, T: 280000}, {F: 58, T: 290000}, {F: 60, T: 300000}},
					Metric: lbls2,
				},
			},
		}, {
			query: `metric_neg[2s] @ 0`,
			start: 100,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{{F: 3, T: -2000}, {F: 2, T: -1000}, {F: 1, T: 0}},
					Metric: lblsneg,
				},
			},
		}, {
			query: `metric_neg[3s] @ -500`,
			start: 100,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{{F: 504, T: -503000}, {F: 503, T: -502000}, {F: 502, T: -501000}, {F: 501, T: -500000}},
					Metric: lblsneg,
				},
			},
		}, {
			query: `metric_ms[3ms] @ 2.345`,
			start: 100,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{{F: 2342, T: 2342}, {F: 2343, T: 2343}, {F: 2344, T: 2344}, {F: 2345, T: 2345}},
					Metric: lblsms,
				},
			},
		}, {
			query: "metric[100s:25s] @ 300",
			start: 100,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{{F: 20, T: 200000}, {F: 22, T: 225000}, {F: 25, T: 250000}, {F: 27, T: 275000}, {F: 30, T: 300000}},
					Metric: lbls1,
				},
				promql.Series{
					Floats: []promql.FPoint{{F: 40, T: 200000}, {F: 44, T: 225000}, {F: 50, T: 250000}, {F: 54, T: 275000}, {F: 60, T: 300000}},
					Metric: lbls2,
				},
			},
		}, {
			query: "metric_neg[50s:25s] @ 0",
			start: 100,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{{F: 51, T: -50000}, {F: 26, T: -25000}, {F: 1, T: 0}},
					Metric: lblsneg,
				},
			},
		}, {
			query: "metric_neg[50s:25s] @ -100",
			start: 100,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{{F: 151, T: -150000}, {F: 126, T: -125000}, {F: 101, T: -100000}},
					Metric: lblsneg,
				},
			},
		}, {
			query: `metric_ms[100ms:25ms] @ 2.345`,
			start: 100,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{{F: 2250, T: 2250}, {F: 2275, T: 2275}, {F: 2300, T: 2300}, {F: 2325, T: 2325}},
					Metric: lblsms,
				},
			},
		}, {
			query: `metric_topk and topk(1, sum_over_time(metric_topk[50s] @ 100))`,
			start: 50, end: 80, interval: 10,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{{F: 995, T: 50000}, {F: 994, T: 60000}, {F: 993, T: 70000}, {F: 992, T: 80000}},
					Metric: lblstopk3,
				},
			},
		}, {
			query: `metric_topk and topk(1, sum_over_time(metric_topk[50s] @ 5000))`,
			start: 50, end: 80, interval: 10,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{{F: 10, T: 50000}, {F: 12, T: 60000}, {F: 14, T: 70000}, {F: 16, T: 80000}},
					Metric: lblstopk2,
				},
			},
		}, {
			query: `metric_topk and topk(1, sum_over_time(metric_topk[50s] @ end()))`,
			start: 70, end: 100, interval: 10,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{{F: 993, T: 70000}, {F: 992, T: 80000}, {F: 991, T: 90000}, {F: 990, T: 100000}},
					Metric: lblstopk3,
				},
			},
		}, {
			query: `metric_topk and topk(1, sum_over_time(metric_topk[50s] @ start()))`,
			start: 100, end: 130, interval: 10,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{{F: 990, T: 100000}, {F: 989, T: 110000}, {F: 988, T: 120000}, {F: 987, T: 130000}},
					Metric: lblstopk3,
				},
			},
		}, {
			// Tests for https://github.com/prometheus/prometheus/issues/8433.
			// The trick here is that the query range should be > lookback delta.
			query: `timestamp(metric_timestamp @ 3600)`,
			start: 0, end: 7 * 60, interval: 60,
			result: promql.Matrix{
				promql.Series{
					Floats: []promql.FPoint{
						{F: 3600, T: 0},
						{F: 3600, T: 60 * 1000},
						{F: 3600, T: 2 * 60 * 1000},
						{F: 3600, T: 3 * 60 * 1000},
						{F: 3600, T: 4 * 60 * 1000},
						{F: 3600, T: 5 * 60 * 1000},
						{F: 3600, T: 6 * 60 * 1000},
						{F: 3600, T: 7 * 60 * 1000},
					},
					Metric: labels.EmptyLabels(),
				},
			},
		},
	}

	for _, c := range cases {
		t.Run(c.query, func(t *testing.T) {
			// An unset interval means a 1s default for range queries.
			if c.interval == 0 {
				c.interval = 1
			}
			start, end, interval := time.Unix(c.start, 0), time.Unix(c.end, 0), time.Duration(c.interval)*time.Second
			var err error
			var qry promql.Query
			// end == 0 marks an instant query; otherwise run a range query.
			if c.end == 0 {
				qry, err = engine.NewInstantQuery(context.Background(), storage, nil, c.query, start)
			} else {
				qry, err = engine.NewRangeQuery(context.Background(), storage, nil, c.query, start, end, interval)
			}
			require.NoError(t, err)

			res := qry.Exec(context.Background())
			require.NoError(t, res.Err)
			// Sort both expected and actual matrices so series order does not
			// affect the comparison.
			if expMat, ok := c.result.(promql.Matrix); ok {
				sort.Sort(expMat)
				sort.Sort(res.Value.(promql.Matrix))
			}
			testutil.RequireEqual(t, c.result, res.Value, "query %q failed", c.query)
		})
	}
}
1733

1734
func TestSubquerySelector(t *testing.T) {
1735
	type caseType struct {
1736
		Query  string
1737
		Result promql.Result
1738
		Start  time.Time
1739
	}
1740

1741
	for _, tst := range []struct {
1742
		loadString string
1743
		cases      []caseType
1744
	}{
1745
		{
1746
			loadString: `load 10s
1747
							metric 1 2`,
1748
			cases: []caseType{
1749
				{
1750
					Query: "metric[20s:10s]",
1751
					Result: promql.Result{
1752
						nil,
1753
						promql.Matrix{
1754
							promql.Series{
1755
								Floats: []promql.FPoint{{F: 1, T: 0}, {F: 2, T: 10000}},
1756
								Metric: labels.FromStrings("__name__", "metric"),
1757
							},
1758
						},
1759
						nil,
1760
					},
1761
					Start: time.Unix(10, 0),
1762
				},
1763
				{
1764
					Query: "metric[20s:5s]",
1765
					Result: promql.Result{
1766
						nil,
1767
						promql.Matrix{
1768
							promql.Series{
1769
								Floats: []promql.FPoint{{F: 1, T: 0}, {F: 1, T: 5000}, {F: 2, T: 10000}},
1770
								Metric: labels.FromStrings("__name__", "metric"),
1771
							},
1772
						},
1773
						nil,
1774
					},
1775
					Start: time.Unix(10, 0),
1776
				},
1777
				{
1778
					Query: "metric[20s:5s] offset 2s",
1779
					Result: promql.Result{
1780
						nil,
1781
						promql.Matrix{
1782
							promql.Series{
1783
								Floats: []promql.FPoint{{F: 1, T: 0}, {F: 1, T: 5000}, {F: 2, T: 10000}},
1784
								Metric: labels.FromStrings("__name__", "metric"),
1785
							},
1786
						},
1787
						nil,
1788
					},
1789
					Start: time.Unix(12, 0),
1790
				},
1791
				{
1792
					Query: "metric[20s:5s] offset 6s",
1793
					Result: promql.Result{
1794
						nil,
1795
						promql.Matrix{
1796
							promql.Series{
1797
								Floats: []promql.FPoint{{F: 1, T: 0}, {F: 1, T: 5000}, {F: 2, T: 10000}},
1798
								Metric: labels.FromStrings("__name__", "metric"),
1799
							},
1800
						},
1801
						nil,
1802
					},
1803
					Start: time.Unix(20, 0),
1804
				},
1805
				{
1806
					Query: "metric[20s:5s] offset 4s",
1807
					Result: promql.Result{
1808
						nil,
1809
						promql.Matrix{
1810
							promql.Series{
1811
								Floats: []promql.FPoint{{F: 2, T: 15000}, {F: 2, T: 20000}, {F: 2, T: 25000}, {F: 2, T: 30000}},
1812
								Metric: labels.FromStrings("__name__", "metric"),
1813
							},
1814
						},
1815
						nil,
1816
					},
1817
					Start: time.Unix(35, 0),
1818
				},
1819
				{
1820
					Query: "metric[20s:5s] offset 5s",
1821
					Result: promql.Result{
1822
						nil,
1823
						promql.Matrix{
1824
							promql.Series{
1825
								Floats: []promql.FPoint{{F: 2, T: 10000}, {F: 2, T: 15000}, {F: 2, T: 20000}, {F: 2, T: 25000}, {F: 2, T: 30000}},
1826
								Metric: labels.FromStrings("__name__", "metric"),
1827
							},
1828
						},
1829
						nil,
1830
					},
1831
					Start: time.Unix(35, 0),
1832
				},
1833
				{
1834
					Query: "metric[20s:5s] offset 6s",
1835
					Result: promql.Result{
1836
						nil,
1837
						promql.Matrix{
1838
							promql.Series{
1839
								Floats: []promql.FPoint{{F: 2, T: 10000}, {F: 2, T: 15000}, {F: 2, T: 20000}, {F: 2, T: 25000}},
1840
								Metric: labels.FromStrings("__name__", "metric"),
1841
							},
1842
						},
1843
						nil,
1844
					},
1845
					Start: time.Unix(35, 0),
1846
				},
1847
				{
1848
					Query: "metric[20s:5s] offset 7s",
1849
					Result: promql.Result{
1850
						nil,
1851
						promql.Matrix{
1852
							promql.Series{
1853
								Floats: []promql.FPoint{{F: 2, T: 10000}, {F: 2, T: 15000}, {F: 2, T: 20000}, {F: 2, T: 25000}},
1854
								Metric: labels.FromStrings("__name__", "metric"),
1855
							},
1856
						},
1857
						nil,
1858
					},
1859
					Start: time.Unix(35, 0),
1860
				},
1861
			},
1862
		},
1863
		{
1864
			loadString: `load 10s
1865
							http_requests{job="api-server", instance="0", group="production"}	0+10x1000 100+30x1000
1866
							http_requests{job="api-server", instance="1", group="production"}	0+20x1000 200+30x1000
1867
							http_requests{job="api-server", instance="0", group="canary"}		0+30x1000 300+80x1000
1868
							http_requests{job="api-server", instance="1", group="canary"}		0+40x2000`,
1869
			cases: []caseType{
1870
				{ // Normal selector.
1871
					Query: `http_requests{group=~"pro.*",instance="0"}[30s:10s]`,
1872
					Result: promql.Result{
1873
						nil,
1874
						promql.Matrix{
1875
							promql.Series{
1876
								Floats: []promql.FPoint{{F: 9990, T: 9990000}, {F: 10000, T: 10000000}, {F: 100, T: 10010000}, {F: 130, T: 10020000}},
1877
								Metric: labels.FromStrings("__name__", "http_requests", "job", "api-server", "instance", "0", "group", "production"),
1878
							},
1879
						},
1880
						nil,
1881
					},
1882
					Start: time.Unix(10020, 0),
1883
				},
1884
				{ // Default step.
1885
					Query: `http_requests{group=~"pro.*",instance="0"}[5m:]`,
1886
					Result: promql.Result{
1887
						nil,
1888
						promql.Matrix{
1889
							promql.Series{
1890
								Floats: []promql.FPoint{{F: 9840, T: 9840000}, {F: 9900, T: 9900000}, {F: 9960, T: 9960000}, {F: 130, T: 10020000}, {F: 310, T: 10080000}},
1891
								Metric: labels.FromStrings("__name__", "http_requests", "job", "api-server", "instance", "0", "group", "production"),
1892
							},
1893
						},
1894
						nil,
1895
					},
1896
					Start: time.Unix(10100, 0),
1897
				},
1898
				{ // Checking if high offset (>LookbackDelta) is being taken care of.
1899
					Query: `http_requests{group=~"pro.*",instance="0"}[5m:] offset 20m`,
1900
					Result: promql.Result{
1901
						nil,
1902
						promql.Matrix{
1903
							promql.Series{
1904
								Floats: []promql.FPoint{{F: 8640, T: 8640000}, {F: 8700, T: 8700000}, {F: 8760, T: 8760000}, {F: 8820, T: 8820000}, {F: 8880, T: 8880000}},
1905
								Metric: labels.FromStrings("__name__", "http_requests", "job", "api-server", "instance", "0", "group", "production"),
1906
							},
1907
						},
1908
						nil,
1909
					},
1910
					Start: time.Unix(10100, 0),
1911
				},
1912
				{
1913
					Query: `rate(http_requests[1m])[15s:5s]`,
1914
					Result: promql.Result{
1915
						nil,
1916
						promql.Matrix{
1917
							promql.Series{
1918
								Floats: []promql.FPoint{{F: 3, T: 7985000}, {F: 3, T: 7990000}, {F: 3, T: 7995000}, {F: 3, T: 8000000}},
1919
								Metric: labels.FromStrings("job", "api-server", "instance", "0", "group", "canary"),
1920
							},
1921
							promql.Series{
1922
								Floats: []promql.FPoint{{F: 4, T: 7985000}, {F: 4, T: 7990000}, {F: 4, T: 7995000}, {F: 4, T: 8000000}},
1923
								Metric: labels.FromStrings("job", "api-server", "instance", "1", "group", "canary"),
1924
							},
1925
							promql.Series{
1926
								Floats: []promql.FPoint{{F: 1, T: 7985000}, {F: 1, T: 7990000}, {F: 1, T: 7995000}, {F: 1, T: 8000000}},
1927
								Metric: labels.FromStrings("job", "api-server", "instance", "0", "group", "production"),
1928
							},
1929
							promql.Series{
1930
								Floats: []promql.FPoint{{F: 2, T: 7985000}, {F: 2, T: 7990000}, {F: 2, T: 7995000}, {F: 2, T: 8000000}},
1931
								Metric: labels.FromStrings("job", "api-server", "instance", "1", "group", "production"),
1932
							},
1933
						},
1934
						nil,
1935
					},
1936
					Start: time.Unix(8000, 0),
1937
				},
1938
				{
1939
					Query: `sum(http_requests{group=~"pro.*"})[30s:10s]`,
1940
					Result: promql.Result{
1941
						nil,
1942
						promql.Matrix{
1943
							promql.Series{
1944
								Floats: []promql.FPoint{{F: 270, T: 90000}, {F: 300, T: 100000}, {F: 330, T: 110000}, {F: 360, T: 120000}},
1945
								Metric: labels.EmptyLabels(),
1946
							},
1947
						},
1948
						nil,
1949
					},
1950
					Start: time.Unix(120, 0),
1951
				},
1952
				{
1953
					Query: `sum(http_requests)[40s:10s]`,
1954
					Result: promql.Result{
1955
						nil,
1956
						promql.Matrix{
1957
							promql.Series{
1958
								Floats: []promql.FPoint{{F: 800, T: 80000}, {F: 900, T: 90000}, {F: 1000, T: 100000}, {F: 1100, T: 110000}, {F: 1200, T: 120000}},
1959
								Metric: labels.EmptyLabels(),
1960
							},
1961
						},
1962
						nil,
1963
					},
1964
					Start: time.Unix(120, 0),
1965
				},
1966
				{
1967
					Query: `(sum(http_requests{group=~"p.*"})+sum(http_requests{group=~"c.*"}))[20s:5s]`,
1968
					Result: promql.Result{
1969
						nil,
1970
						promql.Matrix{
1971
							promql.Series{
1972
								Floats: []promql.FPoint{{F: 1000, T: 100000}, {F: 1000, T: 105000}, {F: 1100, T: 110000}, {F: 1100, T: 115000}, {F: 1200, T: 120000}},
1973
								Metric: labels.EmptyLabels(),
1974
							},
1975
						},
1976
						nil,
1977
					},
1978
					Start: time.Unix(120, 0),
1979
				},
1980
			},
1981
		},
1982
	} {
1983
		t.Run("", func(t *testing.T) {
1984
			engine := newTestEngine()
1985
			storage := promqltest.LoadedStorage(t, tst.loadString)
1986
			t.Cleanup(func() { storage.Close() })
1987

1988
			for _, c := range tst.cases {
1989
				t.Run(c.Query, func(t *testing.T) {
1990
					qry, err := engine.NewInstantQuery(context.Background(), storage, nil, c.Query, c.Start)
1991
					require.NoError(t, err)
1992

1993
					res := qry.Exec(context.Background())
1994
					require.Equal(t, c.Result.Err, res.Err)
1995
					mat := res.Value.(promql.Matrix)
1996
					sort.Sort(mat)
1997
					testutil.RequireEqual(t, c.Result.Value, mat)
1998
				})
1999
			}
2000
		})
2001
	}
2002
}
2003

2004
func TestTimestampFunction_StepsMoreOftenThanSamples(t *testing.T) {
2005
	engine := newTestEngine()
2006
	storage := promqltest.LoadedStorage(t, `
2007
load 1m
2008
  metric 0+1x1000
2009
`)
2010
	t.Cleanup(func() { storage.Close() })
2011

2012
	query := "timestamp(metric)"
2013
	start := time.Unix(0, 0)
2014
	end := time.Unix(61, 0)
2015
	interval := time.Second
2016

2017
	// We expect the value to be 0 for t=0s to t=59s (inclusive), then 60 for t=60s and t=61s.
2018
	expectedPoints := []promql.FPoint{}
2019

2020
	for t := 0; t <= 59; t++ {
2021
		expectedPoints = append(expectedPoints, promql.FPoint{F: 0, T: int64(t * 1000)})
2022
	}
2023

2024
	expectedPoints = append(
2025
		expectedPoints,
2026
		promql.FPoint{F: 60, T: 60_000},
2027
		promql.FPoint{F: 60, T: 61_000},
2028
	)
2029

2030
	expectedResult := promql.Matrix{
2031
		promql.Series{
2032
			Floats: expectedPoints,
2033
			Metric: labels.EmptyLabels(),
2034
		},
2035
	}
2036

2037
	qry, err := engine.NewRangeQuery(context.Background(), storage, nil, query, start, end, interval)
2038
	require.NoError(t, err)
2039

2040
	res := qry.Exec(context.Background())
2041
	require.NoError(t, res.Err)
2042
	testutil.RequireEqual(t, expectedResult, res.Value)
2043
}
2044

2045
// FakeQueryLogger is a test double for the engine's query logger.
// It records every logged field and whether Close was called.
type FakeQueryLogger struct {
	closed bool          // set to true by Close
	logs   []interface{} // flattened key/value fields from all Log calls
}
2049

2050
func NewFakeQueryLogger() *FakeQueryLogger {
2051
	return &FakeQueryLogger{
2052
		closed: false,
2053
		logs:   make([]interface{}, 0),
2054
	}
2055
}
2056

2057
// Close marks the logger as closed so tests can assert the engine closed it.
// It always returns nil.
func (f *FakeQueryLogger) Close() error {
	f.closed = true
	return nil
}
2061

2062
// Log appends each passed field individually (flattened) to the log buffer.
// It always returns nil.
func (f *FakeQueryLogger) Log(l ...interface{}) error {
	f.logs = append(f.logs, l...)
	return nil
}
2066

2067
func TestQueryLogger_basic(t *testing.T) {
2068
	opts := promql.EngineOpts{
2069
		Logger:     nil,
2070
		Reg:        nil,
2071
		MaxSamples: 10,
2072
		Timeout:    10 * time.Second,
2073
	}
2074
	engine := promql.NewEngine(opts)
2075

2076
	queryExec := func() {
2077
		ctx, cancelCtx := context.WithCancel(context.Background())
2078
		defer cancelCtx()
2079
		query := engine.NewTestQuery(func(ctx context.Context) error {
2080
			return contextDone(ctx, "test statement execution")
2081
		})
2082
		res := query.Exec(ctx)
2083
		require.NoError(t, res.Err)
2084
	}
2085

2086
	// promql.Query works without query log initialized.
2087
	queryExec()
2088

2089
	f1 := NewFakeQueryLogger()
2090
	engine.SetQueryLogger(f1)
2091
	queryExec()
2092
	for i, field := range []interface{}{"params", map[string]interface{}{"query": "test statement"}} {
2093
		require.Equal(t, field, f1.logs[i])
2094
	}
2095

2096
	l := len(f1.logs)
2097
	queryExec()
2098
	require.Len(t, f1.logs, 2*l)
2099

2100
	// Test that we close the query logger when unsetting it.
2101
	require.False(t, f1.closed, "expected f1 to be open, got closed")
2102
	engine.SetQueryLogger(nil)
2103
	require.True(t, f1.closed, "expected f1 to be closed, got open")
2104
	queryExec()
2105

2106
	// Test that we close the query logger when swapping.
2107
	f2 := NewFakeQueryLogger()
2108
	f3 := NewFakeQueryLogger()
2109
	engine.SetQueryLogger(f2)
2110
	require.False(t, f2.closed, "expected f2 to be open, got closed")
2111
	queryExec()
2112
	engine.SetQueryLogger(f3)
2113
	require.True(t, f2.closed, "expected f2 to be closed, got open")
2114
	require.False(t, f3.closed, "expected f3 to be open, got closed")
2115
	queryExec()
2116
}
2117

2118
func TestQueryLogger_fields(t *testing.T) {
2119
	opts := promql.EngineOpts{
2120
		Logger:     nil,
2121
		Reg:        nil,
2122
		MaxSamples: 10,
2123
		Timeout:    10 * time.Second,
2124
	}
2125
	engine := promql.NewEngine(opts)
2126

2127
	f1 := NewFakeQueryLogger()
2128
	engine.SetQueryLogger(f1)
2129

2130
	ctx, cancelCtx := context.WithCancel(context.Background())
2131
	ctx = promql.NewOriginContext(ctx, map[string]interface{}{"foo": "bar"})
2132
	defer cancelCtx()
2133
	query := engine.NewTestQuery(func(ctx context.Context) error {
2134
		return contextDone(ctx, "test statement execution")
2135
	})
2136

2137
	res := query.Exec(ctx)
2138
	require.NoError(t, res.Err)
2139

2140
	expected := []string{"foo", "bar"}
2141
	for i, field := range expected {
2142
		v := f1.logs[len(f1.logs)-len(expected)+i].(string)
2143
		require.Equal(t, field, v)
2144
	}
2145
}
2146

2147
func TestQueryLogger_error(t *testing.T) {
2148
	opts := promql.EngineOpts{
2149
		Logger:     nil,
2150
		Reg:        nil,
2151
		MaxSamples: 10,
2152
		Timeout:    10 * time.Second,
2153
	}
2154
	engine := promql.NewEngine(opts)
2155

2156
	f1 := NewFakeQueryLogger()
2157
	engine.SetQueryLogger(f1)
2158

2159
	ctx, cancelCtx := context.WithCancel(context.Background())
2160
	ctx = promql.NewOriginContext(ctx, map[string]interface{}{"foo": "bar"})
2161
	defer cancelCtx()
2162
	testErr := errors.New("failure")
2163
	query := engine.NewTestQuery(func(ctx context.Context) error {
2164
		return testErr
2165
	})
2166

2167
	res := query.Exec(ctx)
2168
	require.Error(t, res.Err, "query should have failed")
2169

2170
	for i, field := range []interface{}{"params", map[string]interface{}{"query": "test statement"}, "error", testErr} {
2171
		require.Equal(t, f1.logs[i], field)
2172
	}
2173
}
2174

2175
func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) {
2176
	startTime := time.Unix(1000, 0)
2177
	endTime := time.Unix(9999, 0)
2178
	testCases := []struct {
2179
		input      string      // The input to be parsed.
2180
		expected   parser.Expr // The expected expression AST.
2181
		outputTest bool
2182
	}{
2183
		{
2184
			input: "123.4567",
2185
			expected: &parser.StepInvariantExpr{
2186
				Expr: &parser.NumberLiteral{
2187
					Val:      123.4567,
2188
					PosRange: posrange.PositionRange{Start: 0, End: 8},
2189
				},
2190
			},
2191
		},
2192
		{
2193
			input: `"foo"`,
2194
			expected: &parser.StepInvariantExpr{
2195
				Expr: &parser.StringLiteral{
2196
					Val:      "foo",
2197
					PosRange: posrange.PositionRange{Start: 0, End: 5},
2198
				},
2199
			},
2200
		},
2201
		{
2202
			input: "foo * bar",
2203
			expected: &parser.BinaryExpr{
2204
				Op: parser.MUL,
2205
				LHS: &parser.VectorSelector{
2206
					Name: "foo",
2207
					LabelMatchers: []*labels.Matcher{
2208
						parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2209
					},
2210
					PosRange: posrange.PositionRange{
2211
						Start: 0,
2212
						End:   3,
2213
					},
2214
				},
2215
				RHS: &parser.VectorSelector{
2216
					Name: "bar",
2217
					LabelMatchers: []*labels.Matcher{
2218
						parser.MustLabelMatcher(labels.MatchEqual, "__name__", "bar"),
2219
					},
2220
					PosRange: posrange.PositionRange{
2221
						Start: 6,
2222
						End:   9,
2223
					},
2224
				},
2225
				VectorMatching: &parser.VectorMatching{Card: parser.CardOneToOne},
2226
			},
2227
		},
2228
		{
2229
			input: "foo * bar @ 10",
2230
			expected: &parser.BinaryExpr{
2231
				Op: parser.MUL,
2232
				LHS: &parser.VectorSelector{
2233
					Name: "foo",
2234
					LabelMatchers: []*labels.Matcher{
2235
						parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2236
					},
2237
					PosRange: posrange.PositionRange{
2238
						Start: 0,
2239
						End:   3,
2240
					},
2241
				},
2242
				RHS: &parser.StepInvariantExpr{
2243
					Expr: &parser.VectorSelector{
2244
						Name: "bar",
2245
						LabelMatchers: []*labels.Matcher{
2246
							parser.MustLabelMatcher(labels.MatchEqual, "__name__", "bar"),
2247
						},
2248
						PosRange: posrange.PositionRange{
2249
							Start: 6,
2250
							End:   14,
2251
						},
2252
						Timestamp: makeInt64Pointer(10000),
2253
					},
2254
				},
2255
				VectorMatching: &parser.VectorMatching{Card: parser.CardOneToOne},
2256
			},
2257
		},
2258
		{
2259
			input: "foo @ 20 * bar @ 10",
2260
			expected: &parser.StepInvariantExpr{
2261
				Expr: &parser.BinaryExpr{
2262
					Op: parser.MUL,
2263
					LHS: &parser.VectorSelector{
2264
						Name: "foo",
2265
						LabelMatchers: []*labels.Matcher{
2266
							parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2267
						},
2268
						PosRange: posrange.PositionRange{
2269
							Start: 0,
2270
							End:   8,
2271
						},
2272
						Timestamp: makeInt64Pointer(20000),
2273
					},
2274
					RHS: &parser.VectorSelector{
2275
						Name: "bar",
2276
						LabelMatchers: []*labels.Matcher{
2277
							parser.MustLabelMatcher(labels.MatchEqual, "__name__", "bar"),
2278
						},
2279
						PosRange: posrange.PositionRange{
2280
							Start: 11,
2281
							End:   19,
2282
						},
2283
						Timestamp: makeInt64Pointer(10000),
2284
					},
2285
					VectorMatching: &parser.VectorMatching{Card: parser.CardOneToOne},
2286
				},
2287
			},
2288
		},
2289
		{
2290
			input: "test[5s]",
2291
			expected: &parser.MatrixSelector{
2292
				VectorSelector: &parser.VectorSelector{
2293
					Name: "test",
2294
					LabelMatchers: []*labels.Matcher{
2295
						parser.MustLabelMatcher(labels.MatchEqual, "__name__", "test"),
2296
					},
2297
					PosRange: posrange.PositionRange{
2298
						Start: 0,
2299
						End:   4,
2300
					},
2301
				},
2302
				Range:  5 * time.Second,
2303
				EndPos: 8,
2304
			},
2305
		},
2306
		{
2307
			input: `test{a="b"}[5y] @ 1603774699`,
2308
			expected: &parser.StepInvariantExpr{
2309
				Expr: &parser.MatrixSelector{
2310
					VectorSelector: &parser.VectorSelector{
2311
						Name:      "test",
2312
						Timestamp: makeInt64Pointer(1603774699000),
2313
						LabelMatchers: []*labels.Matcher{
2314
							parser.MustLabelMatcher(labels.MatchEqual, "a", "b"),
2315
							parser.MustLabelMatcher(labels.MatchEqual, "__name__", "test"),
2316
						},
2317
						PosRange: posrange.PositionRange{
2318
							Start: 0,
2319
							End:   11,
2320
						},
2321
					},
2322
					Range:  5 * 365 * 24 * time.Hour,
2323
					EndPos: 28,
2324
				},
2325
			},
2326
		},
2327
		{
2328
			input: "sum by (foo)(some_metric)",
2329
			expected: &parser.AggregateExpr{
2330
				Op: parser.SUM,
2331
				Expr: &parser.VectorSelector{
2332
					Name: "some_metric",
2333
					LabelMatchers: []*labels.Matcher{
2334
						parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2335
					},
2336
					PosRange: posrange.PositionRange{
2337
						Start: 13,
2338
						End:   24,
2339
					},
2340
				},
2341
				Grouping: []string{"foo"},
2342
				PosRange: posrange.PositionRange{
2343
					Start: 0,
2344
					End:   25,
2345
				},
2346
			},
2347
		},
2348
		{
2349
			input: "sum by (foo)(some_metric @ 10)",
2350
			expected: &parser.StepInvariantExpr{
2351
				Expr: &parser.AggregateExpr{
2352
					Op: parser.SUM,
2353
					Expr: &parser.VectorSelector{
2354
						Name: "some_metric",
2355
						LabelMatchers: []*labels.Matcher{
2356
							parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2357
						},
2358
						PosRange: posrange.PositionRange{
2359
							Start: 13,
2360
							End:   29,
2361
						},
2362
						Timestamp: makeInt64Pointer(10000),
2363
					},
2364
					Grouping: []string{"foo"},
2365
					PosRange: posrange.PositionRange{
2366
						Start: 0,
2367
						End:   30,
2368
					},
2369
				},
2370
			},
2371
		},
2372
		{
2373
			input: "sum(some_metric1 @ 10) + sum(some_metric2 @ 20)",
2374
			expected: &parser.StepInvariantExpr{
2375
				Expr: &parser.BinaryExpr{
2376
					Op:             parser.ADD,
2377
					VectorMatching: &parser.VectorMatching{},
2378
					LHS: &parser.AggregateExpr{
2379
						Op: parser.SUM,
2380
						Expr: &parser.VectorSelector{
2381
							Name: "some_metric1",
2382
							LabelMatchers: []*labels.Matcher{
2383
								parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric1"),
2384
							},
2385
							PosRange: posrange.PositionRange{
2386
								Start: 4,
2387
								End:   21,
2388
							},
2389
							Timestamp: makeInt64Pointer(10000),
2390
						},
2391
						PosRange: posrange.PositionRange{
2392
							Start: 0,
2393
							End:   22,
2394
						},
2395
					},
2396
					RHS: &parser.AggregateExpr{
2397
						Op: parser.SUM,
2398
						Expr: &parser.VectorSelector{
2399
							Name: "some_metric2",
2400
							LabelMatchers: []*labels.Matcher{
2401
								parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric2"),
2402
							},
2403
							PosRange: posrange.PositionRange{
2404
								Start: 29,
2405
								End:   46,
2406
							},
2407
							Timestamp: makeInt64Pointer(20000),
2408
						},
2409
						PosRange: posrange.PositionRange{
2410
							Start: 25,
2411
							End:   47,
2412
						},
2413
					},
2414
				},
2415
			},
2416
		},
2417
		{
2418
			input: "some_metric and topk(5, rate(some_metric[1m] @ 20))",
2419
			expected: &parser.BinaryExpr{
2420
				Op: parser.LAND,
2421
				VectorMatching: &parser.VectorMatching{
2422
					Card: parser.CardManyToMany,
2423
				},
2424
				LHS: &parser.VectorSelector{
2425
					Name: "some_metric",
2426
					LabelMatchers: []*labels.Matcher{
2427
						parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2428
					},
2429
					PosRange: posrange.PositionRange{
2430
						Start: 0,
2431
						End:   11,
2432
					},
2433
				},
2434
				RHS: &parser.StepInvariantExpr{
2435
					Expr: &parser.AggregateExpr{
2436
						Op: parser.TOPK,
2437
						Expr: &parser.Call{
2438
							Func: parser.MustGetFunction("rate"),
2439
							Args: parser.Expressions{
2440
								&parser.MatrixSelector{
2441
									VectorSelector: &parser.VectorSelector{
2442
										Name: "some_metric",
2443
										LabelMatchers: []*labels.Matcher{
2444
											parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2445
										},
2446
										PosRange: posrange.PositionRange{
2447
											Start: 29,
2448
											End:   40,
2449
										},
2450
										Timestamp: makeInt64Pointer(20000),
2451
									},
2452
									Range:  1 * time.Minute,
2453
									EndPos: 49,
2454
								},
2455
							},
2456
							PosRange: posrange.PositionRange{
2457
								Start: 24,
2458
								End:   50,
2459
							},
2460
						},
2461
						Param: &parser.NumberLiteral{
2462
							Val: 5,
2463
							PosRange: posrange.PositionRange{
2464
								Start: 21,
2465
								End:   22,
2466
							},
2467
						},
2468
						PosRange: posrange.PositionRange{
2469
							Start: 16,
2470
							End:   51,
2471
						},
2472
					},
2473
				},
2474
			},
2475
		},
2476
		{
2477
			input: "time()",
2478
			expected: &parser.Call{
2479
				Func: parser.MustGetFunction("time"),
2480
				Args: parser.Expressions{},
2481
				PosRange: posrange.PositionRange{
2482
					Start: 0,
2483
					End:   6,
2484
				},
2485
			},
2486
		},
2487
		{
2488
			input: `foo{bar="baz"}[10m:6s]`,
2489
			expected: &parser.SubqueryExpr{
2490
				Expr: &parser.VectorSelector{
2491
					Name: "foo",
2492
					LabelMatchers: []*labels.Matcher{
2493
						parser.MustLabelMatcher(labels.MatchEqual, "bar", "baz"),
2494
						parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2495
					},
2496
					PosRange: posrange.PositionRange{
2497
						Start: 0,
2498
						End:   14,
2499
					},
2500
				},
2501
				Range:  10 * time.Minute,
2502
				Step:   6 * time.Second,
2503
				EndPos: 22,
2504
			},
2505
		},
2506
		{
2507
			input: `foo{bar="baz"}[10m:6s] @ 10`,
2508
			expected: &parser.StepInvariantExpr{
2509
				Expr: &parser.SubqueryExpr{
2510
					Expr: &parser.VectorSelector{
2511
						Name: "foo",
2512
						LabelMatchers: []*labels.Matcher{
2513
							parser.MustLabelMatcher(labels.MatchEqual, "bar", "baz"),
2514
							parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2515
						},
2516
						PosRange: posrange.PositionRange{
2517
							Start: 0,
2518
							End:   14,
2519
						},
2520
					},
2521
					Range:     10 * time.Minute,
2522
					Step:      6 * time.Second,
2523
					Timestamp: makeInt64Pointer(10000),
2524
					EndPos:    27,
2525
				},
2526
			},
2527
		},
2528
		{ // Even though the subquery is step invariant, the inside is also wrapped separately.
2529
			input: `sum(foo{bar="baz"} @ 20)[10m:6s] @ 10`,
2530
			expected: &parser.StepInvariantExpr{
2531
				Expr: &parser.SubqueryExpr{
2532
					Expr: &parser.StepInvariantExpr{
2533
						Expr: &parser.AggregateExpr{
2534
							Op: parser.SUM,
2535
							Expr: &parser.VectorSelector{
2536
								Name: "foo",
2537
								LabelMatchers: []*labels.Matcher{
2538
									parser.MustLabelMatcher(labels.MatchEqual, "bar", "baz"),
2539
									parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2540
								},
2541
								PosRange: posrange.PositionRange{
2542
									Start: 4,
2543
									End:   23,
2544
								},
2545
								Timestamp: makeInt64Pointer(20000),
2546
							},
2547
							PosRange: posrange.PositionRange{
2548
								Start: 0,
2549
								End:   24,
2550
							},
2551
						},
2552
					},
2553
					Range:     10 * time.Minute,
2554
					Step:      6 * time.Second,
2555
					Timestamp: makeInt64Pointer(10000),
2556
					EndPos:    37,
2557
				},
2558
			},
2559
		},
2560
		{
2561
			input: `min_over_time(rate(foo{bar="baz"}[2s])[5m:] @ 1603775091)[4m:3s]`,
2562
			expected: &parser.SubqueryExpr{
2563
				Expr: &parser.StepInvariantExpr{
2564
					Expr: &parser.Call{
2565
						Func: parser.MustGetFunction("min_over_time"),
2566
						Args: parser.Expressions{
2567
							&parser.SubqueryExpr{
2568
								Expr: &parser.Call{
2569
									Func: parser.MustGetFunction("rate"),
2570
									Args: parser.Expressions{
2571
										&parser.MatrixSelector{
2572
											VectorSelector: &parser.VectorSelector{
2573
												Name: "foo",
2574
												LabelMatchers: []*labels.Matcher{
2575
													parser.MustLabelMatcher(labels.MatchEqual, "bar", "baz"),
2576
													parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2577
												},
2578
												PosRange: posrange.PositionRange{
2579
													Start: 19,
2580
													End:   33,
2581
												},
2582
											},
2583
											Range:  2 * time.Second,
2584
											EndPos: 37,
2585
										},
2586
									},
2587
									PosRange: posrange.PositionRange{
2588
										Start: 14,
2589
										End:   38,
2590
									},
2591
								},
2592
								Range:     5 * time.Minute,
2593
								Timestamp: makeInt64Pointer(1603775091000),
2594
								EndPos:    56,
2595
							},
2596
						},
2597
						PosRange: posrange.PositionRange{
2598
							Start: 0,
2599
							End:   57,
2600
						},
2601
					},
2602
				},
2603
				Range:  4 * time.Minute,
2604
				Step:   3 * time.Second,
2605
				EndPos: 64,
2606
			},
2607
		},
2608
		{
2609
			input: `some_metric @ 123 offset 1m [10m:5s]`,
2610
			expected: &parser.SubqueryExpr{
2611
				Expr: &parser.StepInvariantExpr{
2612
					Expr: &parser.VectorSelector{
2613
						Name: "some_metric",
2614
						LabelMatchers: []*labels.Matcher{
2615
							parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2616
						},
2617
						PosRange: posrange.PositionRange{
2618
							Start: 0,
2619
							End:   27,
2620
						},
2621
						Timestamp:      makeInt64Pointer(123000),
2622
						OriginalOffset: 1 * time.Minute,
2623
					},
2624
				},
2625
				Range:  10 * time.Minute,
2626
				Step:   5 * time.Second,
2627
				EndPos: 36,
2628
			},
2629
		},
2630
		{
2631
			input: `some_metric[10m:5s] offset 1m @ 123`,
2632
			expected: &parser.StepInvariantExpr{
2633
				Expr: &parser.SubqueryExpr{
2634
					Expr: &parser.VectorSelector{
2635
						Name: "some_metric",
2636
						LabelMatchers: []*labels.Matcher{
2637
							parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2638
						},
2639
						PosRange: posrange.PositionRange{
2640
							Start: 0,
2641
							End:   11,
2642
						},
2643
					},
2644
					Timestamp:      makeInt64Pointer(123000),
2645
					OriginalOffset: 1 * time.Minute,
2646
					Range:          10 * time.Minute,
2647
					Step:           5 * time.Second,
2648
					EndPos:         35,
2649
				},
2650
			},
2651
		},
2652
		{
2653
			input: `(foo + bar{nm="val"} @ 1234)[5m:] @ 1603775019`,
2654
			expected: &parser.StepInvariantExpr{
2655
				Expr: &parser.SubqueryExpr{
2656
					Expr: &parser.ParenExpr{
2657
						Expr: &parser.BinaryExpr{
2658
							Op: parser.ADD,
2659
							VectorMatching: &parser.VectorMatching{
2660
								Card: parser.CardOneToOne,
2661
							},
2662
							LHS: &parser.VectorSelector{
2663
								Name: "foo",
2664
								LabelMatchers: []*labels.Matcher{
2665
									parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2666
								},
2667
								PosRange: posrange.PositionRange{
2668
									Start: 1,
2669
									End:   4,
2670
								},
2671
							},
2672
							RHS: &parser.StepInvariantExpr{
2673
								Expr: &parser.VectorSelector{
2674
									Name: "bar",
2675
									LabelMatchers: []*labels.Matcher{
2676
										parser.MustLabelMatcher(labels.MatchEqual, "nm", "val"),
2677
										parser.MustLabelMatcher(labels.MatchEqual, "__name__", "bar"),
2678
									},
2679
									Timestamp: makeInt64Pointer(1234000),
2680
									PosRange: posrange.PositionRange{
2681
										Start: 7,
2682
										End:   27,
2683
									},
2684
								},
2685
							},
2686
						},
2687
						PosRange: posrange.PositionRange{
2688
							Start: 0,
2689
							End:   28,
2690
						},
2691
					},
2692
					Range:     5 * time.Minute,
2693
					Timestamp: makeInt64Pointer(1603775019000),
2694
					EndPos:    46,
2695
				},
2696
			},
2697
		},
2698
		{
2699
			input: "abs(abs(metric @ 10))",
2700
			expected: &parser.StepInvariantExpr{
2701
				Expr: &parser.Call{
2702
					Func: &parser.Function{
2703
						Name:       "abs",
2704
						ArgTypes:   []parser.ValueType{parser.ValueTypeVector},
2705
						ReturnType: parser.ValueTypeVector,
2706
					},
2707
					Args: parser.Expressions{&parser.Call{
2708
						Func: &parser.Function{
2709
							Name:       "abs",
2710
							ArgTypes:   []parser.ValueType{parser.ValueTypeVector},
2711
							ReturnType: parser.ValueTypeVector,
2712
						},
2713
						Args: parser.Expressions{&parser.VectorSelector{
2714
							Name: "metric",
2715
							LabelMatchers: []*labels.Matcher{
2716
								parser.MustLabelMatcher(labels.MatchEqual, "__name__", "metric"),
2717
							},
2718
							PosRange: posrange.PositionRange{
2719
								Start: 8,
2720
								End:   19,
2721
							},
2722
							Timestamp: makeInt64Pointer(10000),
2723
						}},
2724
						PosRange: posrange.PositionRange{
2725
							Start: 4,
2726
							End:   20,
2727
						},
2728
					}},
2729
					PosRange: posrange.PositionRange{
2730
						Start: 0,
2731
						End:   21,
2732
					},
2733
				},
2734
			},
2735
		},
2736
		{
2737
			input: "sum(sum(some_metric1 @ 10) + sum(some_metric2 @ 20))",
2738
			expected: &parser.StepInvariantExpr{
2739
				Expr: &parser.AggregateExpr{
2740
					Op: parser.SUM,
2741
					Expr: &parser.BinaryExpr{
2742
						Op:             parser.ADD,
2743
						VectorMatching: &parser.VectorMatching{},
2744
						LHS: &parser.AggregateExpr{
2745
							Op: parser.SUM,
2746
							Expr: &parser.VectorSelector{
2747
								Name: "some_metric1",
2748
								LabelMatchers: []*labels.Matcher{
2749
									parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric1"),
2750
								},
2751
								PosRange: posrange.PositionRange{
2752
									Start: 8,
2753
									End:   25,
2754
								},
2755
								Timestamp: makeInt64Pointer(10000),
2756
							},
2757
							PosRange: posrange.PositionRange{
2758
								Start: 4,
2759
								End:   26,
2760
							},
2761
						},
2762
						RHS: &parser.AggregateExpr{
2763
							Op: parser.SUM,
2764
							Expr: &parser.VectorSelector{
2765
								Name: "some_metric2",
2766
								LabelMatchers: []*labels.Matcher{
2767
									parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric2"),
2768
								},
2769
								PosRange: posrange.PositionRange{
2770
									Start: 33,
2771
									End:   50,
2772
								},
2773
								Timestamp: makeInt64Pointer(20000),
2774
							},
2775
							PosRange: posrange.PositionRange{
2776
								Start: 29,
2777
								End:   52,
2778
							},
2779
						},
2780
					},
2781
					PosRange: posrange.PositionRange{
2782
						Start: 0,
2783
						End:   52,
2784
					},
2785
				},
2786
			},
2787
		},
2788
		{
2789
			input: `foo @ start()`,
2790
			expected: &parser.StepInvariantExpr{
2791
				Expr: &parser.VectorSelector{
2792
					Name: "foo",
2793
					LabelMatchers: []*labels.Matcher{
2794
						parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2795
					},
2796
					PosRange: posrange.PositionRange{
2797
						Start: 0,
2798
						End:   13,
2799
					},
2800
					Timestamp:  makeInt64Pointer(timestamp.FromTime(startTime)),
2801
					StartOrEnd: parser.START,
2802
				},
2803
			},
2804
		},
2805
		{
2806
			input: `foo @ end()`,
2807
			expected: &parser.StepInvariantExpr{
2808
				Expr: &parser.VectorSelector{
2809
					Name: "foo",
2810
					LabelMatchers: []*labels.Matcher{
2811
						parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"),
2812
					},
2813
					PosRange: posrange.PositionRange{
2814
						Start: 0,
2815
						End:   11,
2816
					},
2817
					Timestamp:  makeInt64Pointer(timestamp.FromTime(endTime)),
2818
					StartOrEnd: parser.END,
2819
				},
2820
			},
2821
		},
2822
		{
2823
			input: `test[5y] @ start()`,
2824
			expected: &parser.StepInvariantExpr{
2825
				Expr: &parser.MatrixSelector{
2826
					VectorSelector: &parser.VectorSelector{
2827
						Name:       "test",
2828
						Timestamp:  makeInt64Pointer(timestamp.FromTime(startTime)),
2829
						StartOrEnd: parser.START,
2830
						LabelMatchers: []*labels.Matcher{
2831
							parser.MustLabelMatcher(labels.MatchEqual, "__name__", "test"),
2832
						},
2833
						PosRange: posrange.PositionRange{
2834
							Start: 0,
2835
							End:   4,
2836
						},
2837
					},
2838
					Range:  5 * 365 * 24 * time.Hour,
2839
					EndPos: 18,
2840
				},
2841
			},
2842
		},
2843
		{
2844
			input: `test[5y] @ end()`,
2845
			expected: &parser.StepInvariantExpr{
2846
				Expr: &parser.MatrixSelector{
2847
					VectorSelector: &parser.VectorSelector{
2848
						Name:       "test",
2849
						Timestamp:  makeInt64Pointer(timestamp.FromTime(endTime)),
2850
						StartOrEnd: parser.END,
2851
						LabelMatchers: []*labels.Matcher{
2852
							parser.MustLabelMatcher(labels.MatchEqual, "__name__", "test"),
2853
						},
2854
						PosRange: posrange.PositionRange{
2855
							Start: 0,
2856
							End:   4,
2857
						},
2858
					},
2859
					Range:  5 * 365 * 24 * time.Hour,
2860
					EndPos: 16,
2861
				},
2862
			},
2863
		},
2864
		{
2865
			input: `some_metric[10m:5s] @ start()`,
2866
			expected: &parser.StepInvariantExpr{
2867
				Expr: &parser.SubqueryExpr{
2868
					Expr: &parser.VectorSelector{
2869
						Name: "some_metric",
2870
						LabelMatchers: []*labels.Matcher{
2871
							parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2872
						},
2873
						PosRange: posrange.PositionRange{
2874
							Start: 0,
2875
							End:   11,
2876
						},
2877
					},
2878
					Timestamp:  makeInt64Pointer(timestamp.FromTime(startTime)),
2879
					StartOrEnd: parser.START,
2880
					Range:      10 * time.Minute,
2881
					Step:       5 * time.Second,
2882
					EndPos:     29,
2883
				},
2884
			},
2885
		},
2886
		{
2887
			input: `some_metric[10m:5s] @ end()`,
2888
			expected: &parser.StepInvariantExpr{
2889
				Expr: &parser.SubqueryExpr{
2890
					Expr: &parser.VectorSelector{
2891
						Name: "some_metric",
2892
						LabelMatchers: []*labels.Matcher{
2893
							parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2894
						},
2895
						PosRange: posrange.PositionRange{
2896
							Start: 0,
2897
							End:   11,
2898
						},
2899
					},
2900
					Timestamp:  makeInt64Pointer(timestamp.FromTime(endTime)),
2901
					StartOrEnd: parser.END,
2902
					Range:      10 * time.Minute,
2903
					Step:       5 * time.Second,
2904
					EndPos:     27,
2905
				},
2906
			},
2907
		},
2908
		{
2909
			input:      `floor(some_metric / (3 * 1024))`,
2910
			outputTest: true,
2911
			expected: &parser.Call{
2912
				Func: &parser.Function{
2913
					Name:       "floor",
2914
					ArgTypes:   []parser.ValueType{parser.ValueTypeVector},
2915
					ReturnType: parser.ValueTypeVector,
2916
				},
2917
				Args: parser.Expressions{
2918
					&parser.BinaryExpr{
2919
						Op: parser.DIV,
2920
						LHS: &parser.VectorSelector{
2921
							Name: "some_metric",
2922
							LabelMatchers: []*labels.Matcher{
2923
								parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"),
2924
							},
2925
							PosRange: posrange.PositionRange{
2926
								Start: 6,
2927
								End:   17,
2928
							},
2929
						},
2930
						RHS: &parser.StepInvariantExpr{
2931
							Expr: &parser.ParenExpr{
2932
								Expr: &parser.BinaryExpr{
2933
									Op: parser.MUL,
2934
									LHS: &parser.NumberLiteral{
2935
										Val: 3,
2936
										PosRange: posrange.PositionRange{
2937
											Start: 21,
2938
											End:   22,
2939
										},
2940
									},
2941
									RHS: &parser.NumberLiteral{
2942
										Val: 1024,
2943
										PosRange: posrange.PositionRange{
2944
											Start: 25,
2945
											End:   29,
2946
										},
2947
									},
2948
								},
2949
								PosRange: posrange.PositionRange{
2950
									Start: 20,
2951
									End:   30,
2952
								},
2953
							},
2954
						},
2955
					},
2956
				},
2957
				PosRange: posrange.PositionRange{
2958
					Start: 0,
2959
					End:   31,
2960
				},
2961
			},
2962
		},
2963
	}
2964

2965
	for _, test := range testCases {
2966
		t.Run(test.input, func(t *testing.T) {
2967
			expr, err := parser.ParseExpr(test.input)
2968
			require.NoError(t, err)
2969
			expr = promql.PreprocessExpr(expr, startTime, endTime)
2970
			if test.outputTest {
2971
				require.Equal(t, test.input, expr.String(), "error on input '%s'", test.input)
2972
			}
2973
			require.Equal(t, test.expected, expr, "error on input '%s'", test.input)
2974
		})
2975
	}
2976
}
2977

2978
func TestEngineOptsValidation(t *testing.T) {
2979
	cases := []struct {
2980
		opts     promql.EngineOpts
2981
		query    string
2982
		fail     bool
2983
		expError error
2984
	}{
2985
		{
2986
			opts:  promql.EngineOpts{EnableAtModifier: false},
2987
			query: "metric @ 100", fail: true, expError: promql.ErrValidationAtModifierDisabled,
2988
		}, {
2989
			opts:  promql.EngineOpts{EnableAtModifier: false},
2990
			query: "rate(metric[1m] @ 100)", fail: true, expError: promql.ErrValidationAtModifierDisabled,
2991
		}, {
2992
			opts:  promql.EngineOpts{EnableAtModifier: false},
2993
			query: "rate(metric[1h:1m] @ 100)", fail: true, expError: promql.ErrValidationAtModifierDisabled,
2994
		}, {
2995
			opts:  promql.EngineOpts{EnableAtModifier: false},
2996
			query: "metric @ start()", fail: true, expError: promql.ErrValidationAtModifierDisabled,
2997
		}, {
2998
			opts:  promql.EngineOpts{EnableAtModifier: false},
2999
			query: "rate(metric[1m] @ start())", fail: true, expError: promql.ErrValidationAtModifierDisabled,
3000
		}, {
3001
			opts:  promql.EngineOpts{EnableAtModifier: false},
3002
			query: "rate(metric[1h:1m] @ start())", fail: true, expError: promql.ErrValidationAtModifierDisabled,
3003
		}, {
3004
			opts:  promql.EngineOpts{EnableAtModifier: false},
3005
			query: "metric @ end()", fail: true, expError: promql.ErrValidationAtModifierDisabled,
3006
		}, {
3007
			opts:  promql.EngineOpts{EnableAtModifier: false},
3008
			query: "rate(metric[1m] @ end())", fail: true, expError: promql.ErrValidationAtModifierDisabled,
3009
		}, {
3010
			opts:  promql.EngineOpts{EnableAtModifier: false},
3011
			query: "rate(metric[1h:1m] @ end())", fail: true, expError: promql.ErrValidationAtModifierDisabled,
3012
		}, {
3013
			opts:  promql.EngineOpts{EnableAtModifier: true},
3014
			query: "metric @ 100",
3015
		}, {
3016
			opts:  promql.EngineOpts{EnableAtModifier: true},
3017
			query: "rate(metric[1m] @ start())",
3018
		}, {
3019
			opts:  promql.EngineOpts{EnableAtModifier: true},
3020
			query: "rate(metric[1h:1m] @ end())",
3021
		}, {
3022
			opts:  promql.EngineOpts{EnableNegativeOffset: false},
3023
			query: "metric offset -1s", fail: true, expError: promql.ErrValidationNegativeOffsetDisabled,
3024
		}, {
3025
			opts:  promql.EngineOpts{EnableNegativeOffset: true},
3026
			query: "metric offset -1s",
3027
		}, {
3028
			opts:  promql.EngineOpts{EnableAtModifier: true, EnableNegativeOffset: true},
3029
			query: "metric @ 100 offset -2m",
3030
		}, {
3031
			opts:  promql.EngineOpts{EnableAtModifier: true, EnableNegativeOffset: true},
3032
			query: "metric offset -2m @ 100",
3033
		},
3034
	}
3035

3036
	for _, c := range cases {
3037
		eng := promql.NewEngine(c.opts)
3038
		_, err1 := eng.NewInstantQuery(context.Background(), nil, nil, c.query, time.Unix(10, 0))
3039
		_, err2 := eng.NewRangeQuery(context.Background(), nil, nil, c.query, time.Unix(0, 0), time.Unix(10, 0), time.Second)
3040
		if c.fail {
3041
			require.Equal(t, c.expError, err1)
3042
			require.Equal(t, c.expError, err2)
3043
		} else {
3044
			require.NoError(t, err1)
3045
			require.NoError(t, err2)
3046
		}
3047
	}
3048
}
3049

3050
func TestRangeQuery(t *testing.T) {
3051
	cases := []struct {
3052
		Name     string
3053
		Load     string
3054
		Query    string
3055
		Result   parser.Value
3056
		Start    time.Time
3057
		End      time.Time
3058
		Interval time.Duration
3059
	}{
3060
		{
3061
			Name: "sum_over_time with all values",
3062
			Load: `load 30s
3063
              bar 0 1 10 100 1000`,
3064
			Query: "sum_over_time(bar[30s])",
3065
			Result: promql.Matrix{
3066
				promql.Series{
3067
					Floats: []promql.FPoint{{F: 0, T: 0}, {F: 11, T: 60000}, {F: 1100, T: 120000}},
3068
					Metric: labels.EmptyLabels(),
3069
				},
3070
			},
3071
			Start:    time.Unix(0, 0),
3072
			End:      time.Unix(120, 0),
3073
			Interval: 60 * time.Second,
3074
		},
3075
		{
3076
			Name: "sum_over_time with trailing values",
3077
			Load: `load 30s
3078
              bar 0 1 10 100 1000 0 0 0 0`,
3079
			Query: "sum_over_time(bar[30s])",
3080
			Result: promql.Matrix{
3081
				promql.Series{
3082
					Floats: []promql.FPoint{{F: 0, T: 0}, {F: 11, T: 60000}, {F: 1100, T: 120000}},
3083
					Metric: labels.EmptyLabels(),
3084
				},
3085
			},
3086
			Start:    time.Unix(0, 0),
3087
			End:      time.Unix(120, 0),
3088
			Interval: 60 * time.Second,
3089
		},
3090
		{
3091
			Name: "sum_over_time with all values long",
3092
			Load: `load 30s
3093
              bar 0 1 10 100 1000 10000 100000 1000000 10000000`,
3094
			Query: "sum_over_time(bar[30s])",
3095
			Result: promql.Matrix{
3096
				promql.Series{
3097
					Floats: []promql.FPoint{{F: 0, T: 0}, {F: 11, T: 60000}, {F: 1100, T: 120000}, {F: 110000, T: 180000}, {F: 11000000, T: 240000}},
3098
					Metric: labels.EmptyLabels(),
3099
				},
3100
			},
3101
			Start:    time.Unix(0, 0),
3102
			End:      time.Unix(240, 0),
3103
			Interval: 60 * time.Second,
3104
		},
3105
		{
3106
			Name: "sum_over_time with all values random",
3107
			Load: `load 30s
3108
              bar 5 17 42 2 7 905 51`,
3109
			Query: "sum_over_time(bar[30s])",
3110
			Result: promql.Matrix{
3111
				promql.Series{
3112
					Floats: []promql.FPoint{{F: 5, T: 0}, {F: 59, T: 60000}, {F: 9, T: 120000}, {F: 956, T: 180000}},
3113
					Metric: labels.EmptyLabels(),
3114
				},
3115
			},
3116
			Start:    time.Unix(0, 0),
3117
			End:      time.Unix(180, 0),
3118
			Interval: 60 * time.Second,
3119
		},
3120
		{
3121
			Name: "metric query",
3122
			Load: `load 30s
3123
              metric 1+1x4`,
3124
			Query: "metric",
3125
			Result: promql.Matrix{
3126
				promql.Series{
3127
					Floats: []promql.FPoint{{F: 1, T: 0}, {F: 3, T: 60000}, {F: 5, T: 120000}},
3128
					Metric: labels.FromStrings("__name__", "metric"),
3129
				},
3130
			},
3131
			Start:    time.Unix(0, 0),
3132
			End:      time.Unix(120, 0),
3133
			Interval: 1 * time.Minute,
3134
		},
3135
		{
3136
			Name: "metric query with trailing values",
3137
			Load: `load 30s
3138
              metric 1+1x8`,
3139
			Query: "metric",
3140
			Result: promql.Matrix{
3141
				promql.Series{
3142
					Floats: []promql.FPoint{{F: 1, T: 0}, {F: 3, T: 60000}, {F: 5, T: 120000}},
3143
					Metric: labels.FromStrings("__name__", "metric"),
3144
				},
3145
			},
3146
			Start:    time.Unix(0, 0),
3147
			End:      time.Unix(120, 0),
3148
			Interval: 1 * time.Minute,
3149
		},
3150
		{
3151
			Name: "short-circuit",
3152
			Load: `load 30s
3153
							foo{job="1"} 1+1x4
3154
							bar{job="2"} 1+1x4`,
3155
			Query: `foo > 2 or bar`,
3156
			Result: promql.Matrix{
3157
				promql.Series{
3158
					Floats: []promql.FPoint{{F: 1, T: 0}, {F: 3, T: 60000}, {F: 5, T: 120000}},
3159
					Metric: labels.FromStrings(
3160
						"__name__", "bar",
3161
						"job", "2",
3162
					),
3163
				},
3164
				promql.Series{
3165
					Floats: []promql.FPoint{{F: 3, T: 60000}, {F: 5, T: 120000}},
3166
					Metric: labels.FromStrings(
3167
						"__name__", "foo",
3168
						"job", "1",
3169
					),
3170
				},
3171
			},
3172
			Start:    time.Unix(0, 0),
3173
			End:      time.Unix(120, 0),
3174
			Interval: 1 * time.Minute,
3175
		},
3176
		{
3177
			Name: "drop-metric-name",
3178
			Load: `load 30s
3179
							requests{job="1", __address__="bar"} 100`,
3180
			Query: `requests * 2`,
3181
			Result: promql.Matrix{
3182
				promql.Series{
3183
					Floats: []promql.FPoint{{F: 200, T: 0}, {F: 200, T: 60000}, {F: 200, T: 120000}},
3184
					Metric: labels.FromStrings(
3185
						"__address__", "bar",
3186
						"job", "1",
3187
					),
3188
				},
3189
			},
3190
			Start:    time.Unix(0, 0),
3191
			End:      time.Unix(120, 0),
3192
			Interval: 1 * time.Minute,
3193
		},
3194
	}
3195
	for _, c := range cases {
3196
		t.Run(c.Name, func(t *testing.T) {
3197
			engine := newTestEngine()
3198
			storage := promqltest.LoadedStorage(t, c.Load)
3199
			t.Cleanup(func() { storage.Close() })
3200

3201
			qry, err := engine.NewRangeQuery(context.Background(), storage, nil, c.Query, c.Start, c.End, c.Interval)
3202
			require.NoError(t, err)
3203

3204
			res := qry.Exec(context.Background())
3205
			require.NoError(t, res.Err)
3206
			testutil.RequireEqual(t, c.Result, res.Value)
3207
		})
3208
	}
3209
}
3210

3211
func TestNativeHistogram_Sum_Count_Add_AvgOperator(t *testing.T) {
3212
	// TODO(codesome): Integrate histograms into the PromQL testing framework
3213
	// and write more tests there.
3214
	cases := []struct {
3215
		histograms  []histogram.Histogram
3216
		expected    histogram.FloatHistogram
3217
		expectedAvg histogram.FloatHistogram
3218
	}{
3219
		{
3220
			histograms: []histogram.Histogram{
3221
				{
3222
					CounterResetHint: histogram.GaugeType,
3223
					Schema:           0,
3224
					Count:            25,
3225
					Sum:              1234.5,
3226
					ZeroThreshold:    0.001,
3227
					ZeroCount:        4,
3228
					PositiveSpans: []histogram.Span{
3229
						{Offset: 0, Length: 2},
3230
						{Offset: 1, Length: 2},
3231
					},
3232
					PositiveBuckets: []int64{1, 1, -1, 0},
3233
					NegativeSpans: []histogram.Span{
3234
						{Offset: 0, Length: 2},
3235
						{Offset: 2, Length: 2},
3236
					},
3237
					NegativeBuckets: []int64{2, 2, -3, 8},
3238
				},
3239
				{
3240
					CounterResetHint: histogram.GaugeType,
3241
					Schema:           0,
3242
					Count:            41,
3243
					Sum:              2345.6,
3244
					ZeroThreshold:    0.001,
3245
					ZeroCount:        5,
3246
					PositiveSpans: []histogram.Span{
3247
						{Offset: 0, Length: 4},
3248
						{Offset: 0, Length: 0},
3249
						{Offset: 0, Length: 3},
3250
					},
3251
					PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},
3252
					NegativeSpans: []histogram.Span{
3253
						{Offset: 1, Length: 4},
3254
						{Offset: 2, Length: 0},
3255
						{Offset: 2, Length: 3},
3256
					},
3257
					NegativeBuckets: []int64{1, 3, -2, 5, -2, 0, -3},
3258
				},
3259
				{
3260
					CounterResetHint: histogram.GaugeType,
3261
					Schema:           0,
3262
					Count:            41,
3263
					Sum:              1111.1,
3264
					ZeroThreshold:    0.001,
3265
					ZeroCount:        5,
3266
					PositiveSpans: []histogram.Span{
3267
						{Offset: 0, Length: 4},
3268
						{Offset: 0, Length: 0},
3269
						{Offset: 0, Length: 3},
3270
					},
3271
					PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},
3272
					NegativeSpans: []histogram.Span{
3273
						{Offset: 1, Length: 4},
3274
						{Offset: 2, Length: 0},
3275
						{Offset: 2, Length: 3},
3276
					},
3277
					NegativeBuckets: []int64{1, 3, -2, 5, -2, 0, -3},
3278
				},
3279
				{
3280
					CounterResetHint: histogram.GaugeType,
3281
					Schema:           1, // Everything is 0 just to make the count 4 so avg has nicer numbers.
3282
				},
3283
			},
3284
			expected: histogram.FloatHistogram{
3285
				CounterResetHint: histogram.GaugeType,
3286
				Schema:           0,
3287
				ZeroThreshold:    0.001,
3288
				ZeroCount:        14,
3289
				Count:            107,
3290
				Sum:              4691.2,
3291
				PositiveSpans: []histogram.Span{
3292
					{Offset: 0, Length: 7},
3293
				},
3294
				PositiveBuckets: []float64{3, 8, 2, 5, 3, 2, 2},
3295
				NegativeSpans: []histogram.Span{
3296
					{Offset: 0, Length: 6},
3297
					{Offset: 3, Length: 3},
3298
				},
3299
				NegativeBuckets: []float64{2, 6, 8, 4, 15, 9, 10, 10, 4},
3300
			},
3301
			expectedAvg: histogram.FloatHistogram{
3302
				CounterResetHint: histogram.GaugeType,
3303
				Schema:           0,
3304
				ZeroThreshold:    0.001,
3305
				ZeroCount:        3.5,
3306
				Count:            26.75,
3307
				Sum:              1172.8,
3308
				PositiveSpans: []histogram.Span{
3309
					{Offset: 0, Length: 7},
3310
				},
3311
				PositiveBuckets: []float64{0.75, 2, 0.5, 1.25, 0.75, 0.5, 0.5},
3312
				NegativeSpans: []histogram.Span{
3313
					{Offset: 0, Length: 6},
3314
					{Offset: 3, Length: 3},
3315
				},
3316
				NegativeBuckets: []float64{0.5, 1.5, 2, 1, 3.75, 2.25, 2.5, 2.5, 1},
3317
			},
3318
		},
3319
	}
3320

3321
	idx0 := int64(0)
3322
	for _, c := range cases {
3323
		for _, floatHisto := range []bool{true, false} {
3324
			t.Run(fmt.Sprintf("floatHistogram=%t %d", floatHisto, idx0), func(t *testing.T) {
3325
				storage := teststorage.New(t)
3326
				t.Cleanup(func() { storage.Close() })
3327

3328
				seriesName := "sparse_histogram_series"
3329
				seriesNameOverTime := "sparse_histogram_series_over_time"
3330

3331
				engine := newTestEngine()
3332

3333
				ts := idx0 * int64(10*time.Minute/time.Millisecond)
3334
				app := storage.Appender(context.Background())
3335
				_, err := app.Append(0, labels.FromStrings("__name__", "float_series", "idx", "0"), ts, 42)
3336
				require.NoError(t, err)
3337
				for idx1, h := range c.histograms {
3338
					lbls := labels.FromStrings("__name__", seriesName, "idx", strconv.Itoa(idx1))
3339
					// Since we mutate h later, we need to create a copy here.
3340
					var err error
3341
					if floatHisto {
3342
						_, err = app.AppendHistogram(0, lbls, ts, nil, h.Copy().ToFloat(nil))
3343
					} else {
3344
						_, err = app.AppendHistogram(0, lbls, ts, h.Copy(), nil)
3345
					}
3346
					require.NoError(t, err)
3347

3348
					lbls = labels.FromStrings("__name__", seriesNameOverTime)
3349
					newTs := ts + int64(idx1)*int64(time.Minute/time.Millisecond)
3350
					// Since we mutate h later, we need to create a copy here.
3351
					if floatHisto {
3352
						_, err = app.AppendHistogram(0, lbls, newTs, nil, h.Copy().ToFloat(nil))
3353
					} else {
3354
						_, err = app.AppendHistogram(0, lbls, newTs, h.Copy(), nil)
3355
					}
3356
					require.NoError(t, err)
3357
				}
3358
				require.NoError(t, app.Commit())
3359

3360
				queryAndCheck := func(queryString string, ts int64, exp promql.Vector) {
3361
					qry, err := engine.NewInstantQuery(context.Background(), storage, nil, queryString, timestamp.Time(ts))
3362
					require.NoError(t, err)
3363

3364
					res := qry.Exec(context.Background())
3365
					require.NoError(t, res.Err)
3366
					require.Empty(t, res.Warnings)
3367

3368
					vector, err := res.Vector()
3369
					require.NoError(t, err)
3370

3371
					testutil.RequireEqual(t, exp, vector)
3372
				}
3373
				queryAndCheckAnnotations := func(queryString string, ts int64, expWarnings annotations.Annotations) {
3374
					qry, err := engine.NewInstantQuery(context.Background(), storage, nil, queryString, timestamp.Time(ts))
3375
					require.NoError(t, err)
3376

3377
					res := qry.Exec(context.Background())
3378
					require.NoError(t, res.Err)
3379
					require.Equal(t, expWarnings, res.Warnings)
3380
				}
3381

3382
				// sum().
3383
				queryString := fmt.Sprintf("sum(%s)", seriesName)
3384
				queryAndCheck(queryString, ts, []promql.Sample{{T: ts, H: &c.expected, Metric: labels.EmptyLabels()}})
3385

3386
				queryString = `sum({idx="0"})`
3387
				var annos annotations.Annotations
3388
				annos.Add(annotations.NewMixedFloatsHistogramsAggWarning(posrange.PositionRange{Start: 4, End: 13}))
3389
				queryAndCheckAnnotations(queryString, ts, annos)
3390

3391
				// + operator.
3392
				queryString = fmt.Sprintf(`%s{idx="0"}`, seriesName)
3393
				for idx := 1; idx < len(c.histograms); idx++ {
3394
					queryString += fmt.Sprintf(` + ignoring(idx) %s{idx="%d"}`, seriesName, idx)
3395
				}
3396
				queryAndCheck(queryString, ts, []promql.Sample{{T: ts, H: &c.expected, Metric: labels.EmptyLabels()}})
3397

3398
				// count().
3399
				queryString = fmt.Sprintf("count(%s)", seriesName)
3400
				queryAndCheck(queryString, ts, []promql.Sample{{T: ts, F: 4, Metric: labels.EmptyLabels()}})
3401

3402
				// avg().
3403
				queryString = fmt.Sprintf("avg(%s)", seriesName)
3404
				queryAndCheck(queryString, ts, []promql.Sample{{T: ts, H: &c.expectedAvg, Metric: labels.EmptyLabels()}})
3405

3406
				offset := int64(len(c.histograms) - 1)
3407
				newTs := ts + offset*int64(time.Minute/time.Millisecond)
3408

3409
				// sum_over_time().
3410
				queryString = fmt.Sprintf("sum_over_time(%s[%dm:1m])", seriesNameOverTime, offset)
3411
				queryAndCheck(queryString, newTs, []promql.Sample{{T: newTs, H: &c.expected, Metric: labels.EmptyLabels()}})
3412

3413
				// avg_over_time().
3414
				queryString = fmt.Sprintf("avg_over_time(%s[%dm:1m])", seriesNameOverTime, offset)
3415
				queryAndCheck(queryString, newTs, []promql.Sample{{T: newTs, H: &c.expectedAvg, Metric: labels.EmptyLabels()}})
3416
			})
3417
			idx0++
3418
		}
3419
	}
3420
}
3421

3422
func TestNativeHistogram_SubOperator(t *testing.T) {
3423
	// TODO(codesome): Integrate histograms into the PromQL testing framework
3424
	// and write more tests there.
3425
	cases := []struct {
3426
		histograms []histogram.Histogram
3427
		expected   histogram.FloatHistogram
3428
	}{
3429
		{
3430
			histograms: []histogram.Histogram{
3431
				{
3432
					Schema:        0,
3433
					Count:         41,
3434
					Sum:           2345.6,
3435
					ZeroThreshold: 0.001,
3436
					ZeroCount:     5,
3437
					PositiveSpans: []histogram.Span{
3438
						{Offset: 0, Length: 4},
3439
						{Offset: 0, Length: 0},
3440
						{Offset: 0, Length: 3},
3441
					},
3442
					PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},
3443
					NegativeSpans: []histogram.Span{
3444
						{Offset: 1, Length: 4},
3445
						{Offset: 2, Length: 0},
3446
						{Offset: 2, Length: 3},
3447
					},
3448
					NegativeBuckets: []int64{1, 3, -2, 5, -2, 0, -3},
3449
				},
3450
				{
3451
					Schema:        0,
3452
					Count:         11,
3453
					Sum:           1234.5,
3454
					ZeroThreshold: 0.001,
3455
					ZeroCount:     3,
3456
					PositiveSpans: []histogram.Span{
3457
						{Offset: 1, Length: 2},
3458
					},
3459
					PositiveBuckets: []int64{2, -1},
3460
					NegativeSpans: []histogram.Span{
3461
						{Offset: 2, Length: 2},
3462
					},
3463
					NegativeBuckets: []int64{3, -1},
3464
				},
3465
			},
3466
			expected: histogram.FloatHistogram{
3467
				Schema:        0,
3468
				Count:         30,
3469
				Sum:           1111.1,
3470
				ZeroThreshold: 0.001,
3471
				ZeroCount:     2,
3472
				PositiveSpans: []histogram.Span{
3473
					{Offset: 0, Length: 2},
3474
					{Offset: 1, Length: 4},
3475
				},
3476
				PositiveBuckets: []float64{1, 1, 2, 1, 1, 1},
3477
				NegativeSpans: []histogram.Span{
3478
					{Offset: 1, Length: 2},
3479
					{Offset: 1, Length: 1},
3480
					{Offset: 4, Length: 3},
3481
				},
3482
				NegativeBuckets: []float64{1, 1, 7, 5, 5, 2},
3483
			},
3484
		},
3485
		{
3486
			histograms: []histogram.Histogram{
3487
				{
3488
					Schema:        0,
3489
					Count:         41,
3490
					Sum:           2345.6,
3491
					ZeroThreshold: 0.001,
3492
					ZeroCount:     5,
3493
					PositiveSpans: []histogram.Span{
3494
						{Offset: 0, Length: 4},
3495
						{Offset: 0, Length: 0},
3496
						{Offset: 0, Length: 3},
3497
					},
3498
					PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},
3499
					NegativeSpans: []histogram.Span{
3500
						{Offset: 1, Length: 4},
3501
						{Offset: 2, Length: 0},
3502
						{Offset: 2, Length: 3},
3503
					},
3504
					NegativeBuckets: []int64{1, 3, -2, 5, -2, 0, -3},
3505
				},
3506
				{
3507
					Schema:        1,
3508
					Count:         11,
3509
					Sum:           1234.5,
3510
					ZeroThreshold: 0.001,
3511
					ZeroCount:     3,
3512
					PositiveSpans: []histogram.Span{
3513
						{Offset: 1, Length: 2},
3514
					},
3515
					PositiveBuckets: []int64{2, -1},
3516
					NegativeSpans: []histogram.Span{
3517
						{Offset: 2, Length: 2},
3518
					},
3519
					NegativeBuckets: []int64{3, -1},
3520
				},
3521
			},
3522
			expected: histogram.FloatHistogram{
3523
				Schema:        0,
3524
				Count:         30,
3525
				Sum:           1111.1,
3526
				ZeroThreshold: 0.001,
3527
				ZeroCount:     2,
3528
				PositiveSpans: []histogram.Span{
3529
					{Offset: 0, Length: 1},
3530
					{Offset: 1, Length: 5},
3531
				},
3532
				PositiveBuckets: []float64{1, 1, 2, 1, 1, 1},
3533
				NegativeSpans: []histogram.Span{
3534
					{Offset: 1, Length: 4},
3535
					{Offset: 4, Length: 3},
3536
				},
3537
				NegativeBuckets: []float64{-2, 2, 2, 7, 5, 5, 2},
3538
			},
3539
		},
3540
		{
3541
			histograms: []histogram.Histogram{
3542
				{
3543
					Schema:        1,
3544
					Count:         11,
3545
					Sum:           1234.5,
3546
					ZeroThreshold: 0.001,
3547
					ZeroCount:     3,
3548
					PositiveSpans: []histogram.Span{
3549
						{Offset: 1, Length: 2},
3550
					},
3551
					PositiveBuckets: []int64{2, -1},
3552
					NegativeSpans: []histogram.Span{
3553
						{Offset: 2, Length: 2},
3554
					},
3555
					NegativeBuckets: []int64{3, -1},
3556
				},
3557
				{
3558
					Schema:        0,
3559
					Count:         41,
3560
					Sum:           2345.6,
3561
					ZeroThreshold: 0.001,
3562
					ZeroCount:     5,
3563
					PositiveSpans: []histogram.Span{
3564
						{Offset: 0, Length: 4},
3565
						{Offset: 0, Length: 0},
3566
						{Offset: 0, Length: 3},
3567
					},
3568
					PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},
3569
					NegativeSpans: []histogram.Span{
3570
						{Offset: 1, Length: 4},
3571
						{Offset: 2, Length: 0},
3572
						{Offset: 2, Length: 3},
3573
					},
3574
					NegativeBuckets: []int64{1, 3, -2, 5, -2, 0, -3},
3575
				},
3576
			},
3577
			expected: histogram.FloatHistogram{
3578
				Schema:        0,
3579
				Count:         -30,
3580
				Sum:           -1111.1,
3581
				ZeroThreshold: 0.001,
3582
				ZeroCount:     -2,
3583
				PositiveSpans: []histogram.Span{
3584
					{Offset: 0, Length: 1},
3585
					{Offset: 1, Length: 5},
3586
				},
3587
				PositiveBuckets: []float64{-1, -1, -2, -1, -1, -1},
3588
				NegativeSpans: []histogram.Span{
3589
					{Offset: 1, Length: 4},
3590
					{Offset: 4, Length: 3},
3591
				},
3592
				NegativeBuckets: []float64{2, -2, -2, -7, -5, -5, -2},
3593
			},
3594
		},
3595
	}
3596

3597
	idx0 := int64(0)
3598
	for _, c := range cases {
3599
		for _, floatHisto := range []bool{true, false} {
3600
			t.Run(fmt.Sprintf("floatHistogram=%t %d", floatHisto, idx0), func(t *testing.T) {
3601
				engine := newTestEngine()
3602
				storage := teststorage.New(t)
3603
				t.Cleanup(func() { storage.Close() })
3604

3605
				seriesName := "sparse_histogram_series"
3606

3607
				ts := idx0 * int64(10*time.Minute/time.Millisecond)
3608
				app := storage.Appender(context.Background())
3609
				for idx1, h := range c.histograms {
3610
					lbls := labels.FromStrings("__name__", seriesName, "idx", strconv.Itoa(idx1))
3611
					// Since we mutate h later, we need to create a copy here.
3612
					var err error
3613
					if floatHisto {
3614
						_, err = app.AppendHistogram(0, lbls, ts, nil, h.Copy().ToFloat(nil))
3615
					} else {
3616
						_, err = app.AppendHistogram(0, lbls, ts, h.Copy(), nil)
3617
					}
3618
					require.NoError(t, err)
3619
				}
3620
				require.NoError(t, app.Commit())
3621

3622
				queryAndCheck := func(queryString string, exp promql.Vector) {
3623
					qry, err := engine.NewInstantQuery(context.Background(), storage, nil, queryString, timestamp.Time(ts))
3624
					require.NoError(t, err)
3625

3626
					res := qry.Exec(context.Background())
3627
					require.NoError(t, res.Err)
3628

3629
					vector, err := res.Vector()
3630
					require.NoError(t, err)
3631

3632
					if len(vector) == len(exp) {
3633
						for i, e := range exp {
3634
							got := vector[i].H
3635
							if got != e.H {
3636
								// Error messages are better if we compare structs, not pointers.
3637
								require.Equal(t, *e.H, *got)
3638
							}
3639
						}
3640
					}
3641

3642
					testutil.RequireEqual(t, exp, vector)
3643
				}
3644

3645
				// - operator.
3646
				queryString := fmt.Sprintf(`%s{idx="0"}`, seriesName)
3647
				for idx := 1; idx < len(c.histograms); idx++ {
3648
					queryString += fmt.Sprintf(` - ignoring(idx) %s{idx="%d"}`, seriesName, idx)
3649
				}
3650
				queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expected, Metric: labels.EmptyLabels()}})
3651
			})
3652
		}
3653
		idx0++
3654
	}
3655
}
3656

3657
func TestNativeHistogram_MulDivOperator(t *testing.T) {
3658
	// TODO(codesome): Integrate histograms into the PromQL testing framework
3659
	// and write more tests there.
3660
	originalHistogram := histogram.Histogram{
3661
		Schema:        0,
3662
		Count:         21,
3663
		Sum:           33,
3664
		ZeroThreshold: 0.001,
3665
		ZeroCount:     3,
3666
		PositiveSpans: []histogram.Span{
3667
			{Offset: 0, Length: 3},
3668
		},
3669
		PositiveBuckets: []int64{3, 0, 0},
3670
		NegativeSpans: []histogram.Span{
3671
			{Offset: 0, Length: 3},
3672
		},
3673
		NegativeBuckets: []int64{3, 0, 0},
3674
	}
3675

3676
	cases := []struct {
3677
		scalar      float64
3678
		histogram   histogram.Histogram
3679
		expectedMul histogram.FloatHistogram
3680
		expectedDiv histogram.FloatHistogram
3681
	}{
3682
		{
3683
			scalar:    3,
3684
			histogram: originalHistogram,
3685
			expectedMul: histogram.FloatHistogram{
3686
				Schema:        0,
3687
				Count:         63,
3688
				Sum:           99,
3689
				ZeroThreshold: 0.001,
3690
				ZeroCount:     9,
3691
				PositiveSpans: []histogram.Span{
3692
					{Offset: 0, Length: 3},
3693
				},
3694
				PositiveBuckets: []float64{9, 9, 9},
3695
				NegativeSpans: []histogram.Span{
3696
					{Offset: 0, Length: 3},
3697
				},
3698
				NegativeBuckets: []float64{9, 9, 9},
3699
			},
3700
			expectedDiv: histogram.FloatHistogram{
3701
				Schema:        0,
3702
				Count:         7,
3703
				Sum:           11,
3704
				ZeroThreshold: 0.001,
3705
				ZeroCount:     1,
3706
				PositiveSpans: []histogram.Span{
3707
					{Offset: 0, Length: 3},
3708
				},
3709
				PositiveBuckets: []float64{1, 1, 1},
3710
				NegativeSpans: []histogram.Span{
3711
					{Offset: 0, Length: 3},
3712
				},
3713
				NegativeBuckets: []float64{1, 1, 1},
3714
			},
3715
		},
3716
		{
3717
			scalar:    0,
3718
			histogram: originalHistogram,
3719
			expectedMul: histogram.FloatHistogram{
3720
				Schema:        0,
3721
				Count:         0,
3722
				Sum:           0,
3723
				ZeroThreshold: 0.001,
3724
				ZeroCount:     0,
3725
				PositiveSpans: []histogram.Span{
3726
					{Offset: 0, Length: 3},
3727
				},
3728
				PositiveBuckets: []float64{0, 0, 0},
3729
				NegativeSpans: []histogram.Span{
3730
					{Offset: 0, Length: 3},
3731
				},
3732
				NegativeBuckets: []float64{0, 0, 0},
3733
			},
3734
			expectedDiv: histogram.FloatHistogram{
3735
				Schema:        0,
3736
				Count:         math.Inf(1),
3737
				Sum:           math.Inf(1),
3738
				ZeroThreshold: 0.001,
3739
				ZeroCount:     math.Inf(1),
3740
				PositiveSpans: []histogram.Span{
3741
					{Offset: 0, Length: 3},
3742
				},
3743
				PositiveBuckets: []float64{math.Inf(1), math.Inf(1), math.Inf(1)},
3744
				NegativeSpans: []histogram.Span{
3745
					{Offset: 0, Length: 3},
3746
				},
3747
				NegativeBuckets: []float64{math.Inf(1), math.Inf(1), math.Inf(1)},
3748
			},
3749
		},
3750
	}
3751

3752
	idx0 := int64(0)
3753
	for _, c := range cases {
3754
		for _, floatHisto := range []bool{true, false} {
3755
			t.Run(fmt.Sprintf("floatHistogram=%t %d", floatHisto, idx0), func(t *testing.T) {
3756
				storage := teststorage.New(t)
3757
				t.Cleanup(func() { storage.Close() })
3758

3759
				seriesName := "sparse_histogram_series"
3760
				floatSeriesName := "float_series"
3761

3762
				engine := newTestEngine()
3763

3764
				ts := idx0 * int64(10*time.Minute/time.Millisecond)
3765
				app := storage.Appender(context.Background())
3766
				h := c.histogram
3767
				lbls := labels.FromStrings("__name__", seriesName)
3768
				// Since we mutate h later, we need to create a copy here.
3769
				var err error
3770
				if floatHisto {
3771
					_, err = app.AppendHistogram(0, lbls, ts, nil, h.Copy().ToFloat(nil))
3772
				} else {
3773
					_, err = app.AppendHistogram(0, lbls, ts, h.Copy(), nil)
3774
				}
3775
				require.NoError(t, err)
3776
				_, err = app.Append(0, labels.FromStrings("__name__", floatSeriesName), ts, c.scalar)
3777
				require.NoError(t, err)
3778
				require.NoError(t, app.Commit())
3779

3780
				queryAndCheck := func(queryString string, exp promql.Vector) {
3781
					qry, err := engine.NewInstantQuery(context.Background(), storage, nil, queryString, timestamp.Time(ts))
3782
					require.NoError(t, err)
3783

3784
					res := qry.Exec(context.Background())
3785
					require.NoError(t, res.Err)
3786

3787
					vector, err := res.Vector()
3788
					require.NoError(t, err)
3789

3790
					testutil.RequireEqual(t, exp, vector)
3791
				}
3792

3793
				// histogram * scalar.
3794
				queryString := fmt.Sprintf(`%s * %f`, seriesName, c.scalar)
3795
				queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedMul, Metric: labels.EmptyLabels()}})
3796

3797
				// scalar * histogram.
3798
				queryString = fmt.Sprintf(`%f * %s`, c.scalar, seriesName)
3799
				queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedMul, Metric: labels.EmptyLabels()}})
3800

3801
				// histogram * float.
3802
				queryString = fmt.Sprintf(`%s * %s`, seriesName, floatSeriesName)
3803
				queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedMul, Metric: labels.EmptyLabels()}})
3804

3805
				// float * histogram.
3806
				queryString = fmt.Sprintf(`%s * %s`, floatSeriesName, seriesName)
3807
				queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedMul, Metric: labels.EmptyLabels()}})
3808

3809
				// histogram / scalar.
3810
				queryString = fmt.Sprintf(`%s / %f`, seriesName, c.scalar)
3811
				queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedDiv, Metric: labels.EmptyLabels()}})
3812

3813
				// histogram / float.
3814
				queryString = fmt.Sprintf(`%s / %s`, seriesName, floatSeriesName)
3815
				queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedDiv, Metric: labels.EmptyLabels()}})
3816
			})
3817
			idx0++
3818
		}
3819
	}
3820
}
func TestQueryLookbackDelta(t *testing.T) {
3823
	var (
3824
		load = `load 5m
3825
metric 0 1 2
3826
`
3827
		query           = "metric"
3828
		lastDatapointTs = time.Unix(600, 0)
3829
	)
3830

3831
	cases := []struct {
3832
		name                          string
3833
		ts                            time.Time
3834
		engineLookback, queryLookback time.Duration
3835
		expectSamples                 bool
3836
	}{
3837
		{
3838
			name:          "default lookback delta",
3839
			ts:            lastDatapointTs.Add(defaultLookbackDelta),
3840
			expectSamples: true,
3841
		},
3842
		{
3843
			name:          "outside default lookback delta",
3844
			ts:            lastDatapointTs.Add(defaultLookbackDelta + time.Millisecond),
3845
			expectSamples: false,
3846
		},
3847
		{
3848
			name:           "custom engine lookback delta",
3849
			ts:             lastDatapointTs.Add(10 * time.Minute),
3850
			engineLookback: 10 * time.Minute,
3851
			expectSamples:  true,
3852
		},
3853
		{
3854
			name:           "outside custom engine lookback delta",
3855
			ts:             lastDatapointTs.Add(10*time.Minute + time.Millisecond),
3856
			engineLookback: 10 * time.Minute,
3857
			expectSamples:  false,
3858
		},
3859
		{
3860
			name:           "custom query lookback delta",
3861
			ts:             lastDatapointTs.Add(20 * time.Minute),
3862
			engineLookback: 10 * time.Minute,
3863
			queryLookback:  20 * time.Minute,
3864
			expectSamples:  true,
3865
		},
3866
		{
3867
			name:           "outside custom query lookback delta",
3868
			ts:             lastDatapointTs.Add(20*time.Minute + time.Millisecond),
3869
			engineLookback: 10 * time.Minute,
3870
			queryLookback:  20 * time.Minute,
3871
			expectSamples:  false,
3872
		},
3873
		{
3874
			name:           "negative custom query lookback delta",
3875
			ts:             lastDatapointTs.Add(20 * time.Minute),
3876
			engineLookback: -10 * time.Minute,
3877
			queryLookback:  20 * time.Minute,
3878
			expectSamples:  true,
3879
		},
3880
	}
3881

3882
	for _, c := range cases {
3883
		c := c
3884
		t.Run(c.name, func(t *testing.T) {
3885
			engine := promqltest.NewTestEngine(false, c.engineLookback, promqltest.DefaultMaxSamplesPerQuery)
3886
			storage := promqltest.LoadedStorage(t, load)
3887
			t.Cleanup(func() { storage.Close() })
3888

3889
			opts := promql.NewPrometheusQueryOpts(false, c.queryLookback)
3890
			qry, err := engine.NewInstantQuery(context.Background(), storage, opts, query, c.ts)
3891
			require.NoError(t, err)
3892

3893
			res := qry.Exec(context.Background())
3894
			require.NoError(t, res.Err)
3895
			vec, ok := res.Value.(promql.Vector)
3896
			require.True(t, ok)
3897
			if c.expectSamples {
3898
				require.NotEmpty(t, vec)
3899
			} else {
3900
				require.Empty(t, vec)
3901
			}
3902
		})
3903
	}
3904
}
// makeInt64Pointer returns a pointer to a freshly allocated int64 holding val.
// Each call yields a distinct allocation, so callers may mutate the pointee freely.
func makeInt64Pointer(val int64) *int64 {
	// Taking the address of the parameter is the idiomatic form of
	// new(int64) followed by an assignment; val escapes to the heap.
	return &val
}
Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.