// Copyright 2020 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package storage

import (
	"context"
	"errors"
	"fmt"
	"math"
	"sort"
	"sync"
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/prometheus/prometheus/model/histogram"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/tsdb/chunkenc"
	"github.com/prometheus/prometheus/tsdb/chunks"
	"github.com/prometheus/prometheus/tsdb/tsdbutil"
	"github.com/prometheus/prometheus/util/annotations"
)

func TestMergeQuerierWithChainMerger(t *testing.T) {
36
	for _, tc := range []struct {
37
		name                 string
38
		primaryQuerierSeries []Series
39
		querierSeries        [][]Series
40
		extraQueriers        []Querier
41

42
		expected SeriesSet
43
	}{
44
		{
45
			name:                 "one primary querier with no series",
46
			primaryQuerierSeries: []Series{},
47
			expected:             NewMockSeriesSet(),
48
		},
49
		{
50
			name:          "one secondary querier with no series",
51
			querierSeries: [][]Series{{}},
52
			expected:      NewMockSeriesSet(),
53
		},
54
		{
55
			name:          "many secondary queriers with no series",
56
			querierSeries: [][]Series{{}, {}, {}, {}, {}, {}, {}},
57
			expected:      NewMockSeriesSet(),
58
		},
59
		{
60
			name:                 "mix of queriers with no series",
61
			primaryQuerierSeries: []Series{},
62
			querierSeries:        [][]Series{{}, {}, {}, {}, {}, {}, {}},
63
			expected:             NewMockSeriesSet(),
64
		},
65
		// Test the remaining cases on secondary queriers only, as the difference between primary and secondary queriers is just error handling.
66
		{
67
			name: "one querier, two series",
68
			querierSeries: [][]Series{{
69
				NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
70
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
71
			}},
72
			expected: NewMockSeriesSet(
73
				NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
74
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
75
			),
76
		},
77
		{
78
			name: "two queriers, one different series each",
79
			querierSeries: [][]Series{{
80
				NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
81
			}, {
82
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
83
			}},
84
			expected: NewMockSeriesSet(
85
				NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
86
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
87
			),
88
		},
89
		{
90
			name: "two time unsorted queriers, two series each",
91
			querierSeries: [][]Series{{
92
				NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}, fSample{6, 6}}),
93
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
94
			}, {
95
				NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
96
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}, fSample{4, 4}}),
97
			}},
98
			expected: NewMockSeriesSet(
99
				NewListSeries(
100
					labels.FromStrings("bar", "baz"),
101
					[]chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}, fSample{6, 6}},
102
				),
103
				NewListSeries(
104
					labels.FromStrings("foo", "bar"),
105
					[]chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}},
106
				),
107
			),
108
		},
109
		{
110
			name: "five queriers, only two queriers have two time unsorted series each",
111
			querierSeries: [][]Series{{}, {}, {
112
				NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}, fSample{6, 6}}),
113
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
114
			}, {
115
				NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
116
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}, fSample{4, 4}}),
117
			}, {}},
118
			expected: NewMockSeriesSet(
119
				NewListSeries(
120
					labels.FromStrings("bar", "baz"),
121
					[]chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}, fSample{6, 6}},
122
				),
123
				NewListSeries(
124
					labels.FromStrings("foo", "bar"),
125
					[]chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}},
126
				),
127
			),
128
		},
129
		{
130
			name: "two queriers, only two queriers have two time unsorted series each, with 3 noop and one nil querier together",
131
			querierSeries: [][]Series{{}, {}, {
132
				NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}, fSample{6, 6}}),
133
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
134
			}, {
135
				NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
136
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}, fSample{4, 4}}),
137
			}, {}},
138
			extraQueriers: []Querier{NoopQuerier(), NoopQuerier(), nil, NoopQuerier()},
139
			expected: NewMockSeriesSet(
140
				NewListSeries(
141
					labels.FromStrings("bar", "baz"),
142
					[]chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}, fSample{6, 6}},
143
				),
144
				NewListSeries(
145
					labels.FromStrings("foo", "bar"),
146
					[]chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}},
147
				),
148
			),
149
		},
150
		{
151
			name: "two queriers, with two series, one is overlapping",
152
			querierSeries: [][]Series{{}, {}, {
153
				NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 21}, fSample{3, 31}, fSample{5, 5}, fSample{6, 6}}),
154
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
155
			}, {
156
				NewListSeries(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 22}, fSample{3, 32}}),
157
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}, fSample{4, 4}}),
158
			}, {}},
159
			expected: NewMockSeriesSet(
160
				NewListSeries(
161
					labels.FromStrings("bar", "baz"),
162
					[]chunks.Sample{fSample{1, 1}, fSample{2, 21}, fSample{3, 31}, fSample{5, 5}, fSample{6, 6}},
163
				),
164
				NewListSeries(
165
					labels.FromStrings("foo", "bar"),
166
					[]chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}},
167
				),
168
			),
169
		},
170
		{
171
			name: "two queries, one with NaN samples series",
172
			querierSeries: [][]Series{{
173
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, math.NaN()}}),
174
			}, {
175
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{1, 1}}),
176
			}},
177
			expected: NewMockSeriesSet(
178
				NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, math.NaN()}, fSample{1, 1}}),
179
			),
180
		},
181
	} {
182
		t.Run(tc.name, func(t *testing.T) {
183
			var p Querier
184
			if tc.primaryQuerierSeries != nil {
185
				p = &mockQuerier{toReturn: tc.primaryQuerierSeries}
186
			}
187
			var qs []Querier
188
			for _, in := range tc.querierSeries {
189
				qs = append(qs, &mockQuerier{toReturn: in})
190
			}
191
			qs = append(qs, tc.extraQueriers...)
192

193
			mergedQuerier := NewMergeQuerier([]Querier{p}, qs, ChainedSeriesMerge).Select(context.Background(), false, nil)
194

195
			// Get all merged series upfront to make sure there are no incorrectly retained shared
196
			// buffers causing bugs.
197
			var mergedSeries []Series
198
			for mergedQuerier.Next() {
199
				mergedSeries = append(mergedSeries, mergedQuerier.At())
200
			}
201
			require.NoError(t, mergedQuerier.Err())
202

203
			for _, actualSeries := range mergedSeries {
204
				require.True(t, tc.expected.Next(), "Expected Next() to be true")
205
				expectedSeries := tc.expected.At()
206
				require.Equal(t, expectedSeries.Labels(), actualSeries.Labels())
207

208
				expSmpl, expErr := ExpandSamples(expectedSeries.Iterator(nil), nil)
209
				actSmpl, actErr := ExpandSamples(actualSeries.Iterator(nil), nil)
210
				require.Equal(t, expErr, actErr)
211
				require.Equal(t, expSmpl, actSmpl)
212
			}
213
			require.False(t, tc.expected.Next(), "Expected Next() to be false")
214
		})
215
	}
216
}
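
// mergeQuerierUsageSketch is an illustrative, hypothetical helper (its name and
// shape are assumptions; it is not used by the tests in this file). It sketches
// the NewMergeQuerier wiring exercised by TestMergeQuerierWithChainMerger above,
// reusing the mockQuerier test double defined later in this file: primaries and
// secondaries are merged with ChainedSeriesMerge and the resulting SeriesSet is
// drained into a slice.
func mergeQuerierUsageSketch() ([]Series, error) {
	primary := &mockQuerier{toReturn: []Series{
		NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}),
	}}
	secondary := &mockQuerier{toReturn: []Series{
		NewListSeries(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{2, 2}}),
	}}

	set := NewMergeQuerier([]Querier{primary}, []Querier{secondary}, ChainedSeriesMerge).
		Select(context.Background(), false, nil)

	var out []Series
	for set.Next() {
		out = append(out, set.At())
	}
	return out, set.Err()
}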
217

218
func TestMergeChunkQuerierWithNoVerticalChunkSeriesMerger(t *testing.T) {
219
	for _, tc := range []struct {
220
		name                    string
221
		primaryChkQuerierSeries []ChunkSeries
222
		chkQuerierSeries        [][]ChunkSeries
223
		extraQueriers           []ChunkQuerier
224

225
		expected ChunkSeriesSet
226
	}{
227
		{
228
			name:                    "one primary querier with no series",
229
			primaryChkQuerierSeries: []ChunkSeries{},
230
			expected:                NewMockChunkSeriesSet(),
231
		},
232
		{
233
			name:             "one secondary querier with no series",
234
			chkQuerierSeries: [][]ChunkSeries{{}},
235
			expected:         NewMockChunkSeriesSet(),
236
		},
237
		{
238
			name:             "many secondary queriers with no series",
239
			chkQuerierSeries: [][]ChunkSeries{{}, {}, {}, {}, {}, {}, {}},
240
			expected:         NewMockChunkSeriesSet(),
241
		},
242
		{
243
			name:                    "mix of queriers with no series",
244
			primaryChkQuerierSeries: []ChunkSeries{},
245
			chkQuerierSeries:        [][]ChunkSeries{{}, {}, {}, {}, {}, {}, {}},
246
			expected:                NewMockChunkSeriesSet(),
247
		},
248
		// Test the remaining cases on secondary queriers only, as the difference between primary and secondary queriers is just error handling.
249
		{
250
			name: "one querier, two series",
251
			chkQuerierSeries: [][]ChunkSeries{{
252
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
253
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
254
			}},
255
			expected: NewMockChunkSeriesSet(
256
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
257
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
258
			),
259
		},
260
		{
261
			name: "two secondaries, one different series each",
262
			chkQuerierSeries: [][]ChunkSeries{{
263
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
264
			}, {
265
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
266
			}},
267
			expected: NewMockChunkSeriesSet(
268
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
269
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
270
			),
271
		},
272
		{
273
			name: "two secondaries, two not in time order series each",
274
			chkQuerierSeries: [][]ChunkSeries{{
275
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}}, []chunks.Sample{fSample{6, 6}}),
276
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
277
			}, {
278
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
279
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}}, []chunks.Sample{fSample{4, 4}}),
280
			}},
281
			expected: NewMockChunkSeriesSet(
282
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
283
					[]chunks.Sample{fSample{1, 1}, fSample{2, 2}},
284
					[]chunks.Sample{fSample{3, 3}},
285
					[]chunks.Sample{fSample{5, 5}},
286
					[]chunks.Sample{fSample{6, 6}},
287
				),
288
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
289
					[]chunks.Sample{fSample{0, 0}, fSample{1, 1}},
290
					[]chunks.Sample{fSample{2, 2}},
291
					[]chunks.Sample{fSample{3, 3}},
292
					[]chunks.Sample{fSample{4, 4}},
293
				),
294
			),
295
		},
296
		{
297
			name: "five secondaries, only two have two not in time order series each",
298
			chkQuerierSeries: [][]ChunkSeries{{}, {}, {
299
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}}, []chunks.Sample{fSample{6, 6}}),
300
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
301
			}, {
302
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
303
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}}, []chunks.Sample{fSample{4, 4}}),
304
			}, {}},
305
			expected: NewMockChunkSeriesSet(
306
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
307
					[]chunks.Sample{fSample{1, 1}, fSample{2, 2}},
308
					[]chunks.Sample{fSample{3, 3}},
309
					[]chunks.Sample{fSample{5, 5}},
310
					[]chunks.Sample{fSample{6, 6}},
311
				),
312
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
313
					[]chunks.Sample{fSample{0, 0}, fSample{1, 1}},
314
					[]chunks.Sample{fSample{2, 2}},
315
					[]chunks.Sample{fSample{3, 3}},
316
					[]chunks.Sample{fSample{4, 4}},
317
				),
318
			),
319
		},
320
		{
321
			name: "two secondaries, with two not in time order series each, with 3 noop queries and one nil together",
322
			chkQuerierSeries: [][]ChunkSeries{{
323
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{5, 5}}, []chunks.Sample{fSample{6, 6}}),
324
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}}, []chunks.Sample{fSample{2, 2}}),
325
			}, {
326
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
327
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{3, 3}}, []chunks.Sample{fSample{4, 4}}),
328
			}},
329
			extraQueriers: []ChunkQuerier{NoopChunkedQuerier(), NoopChunkedQuerier(), nil, NoopChunkedQuerier()},
330
			expected: NewMockChunkSeriesSet(
331
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
332
					[]chunks.Sample{fSample{1, 1}, fSample{2, 2}},
333
					[]chunks.Sample{fSample{3, 3}},
334
					[]chunks.Sample{fSample{5, 5}},
335
					[]chunks.Sample{fSample{6, 6}},
336
				),
337
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
338
					[]chunks.Sample{fSample{0, 0}, fSample{1, 1}},
339
					[]chunks.Sample{fSample{2, 2}},
340
					[]chunks.Sample{fSample{3, 3}},
341
					[]chunks.Sample{fSample{4, 4}},
342
				),
343
			),
344
		},
345
		{
346
			name: "two queries, one with NaN samples series",
347
			chkQuerierSeries: [][]ChunkSeries{{
348
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, math.NaN()}}),
349
			}, {
350
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{1, 1}}),
351
			}},
352
			expected: NewMockChunkSeriesSet(
353
				NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []chunks.Sample{fSample{0, math.NaN()}}, []chunks.Sample{fSample{1, 1}}),
354
			),
355
		},
356
	} {
357
		t.Run(tc.name, func(t *testing.T) {
358
			var p ChunkQuerier
359
			if tc.primaryChkQuerierSeries != nil {
360
				p = &mockChunkQuerier{toReturn: tc.primaryChkQuerierSeries}
361
			}
362

363
			var qs []ChunkQuerier
364
			for _, in := range tc.chkQuerierSeries {
365
				qs = append(qs, &mockChunkQuerier{toReturn: in})
366
			}
367
			qs = append(qs, tc.extraQueriers...)
368

369
			merged := NewMergeChunkQuerier([]ChunkQuerier{p}, qs, NewCompactingChunkSeriesMerger(nil)).Select(context.Background(), false, nil)
370
			for merged.Next() {
371
				require.True(t, tc.expected.Next(), "Expected Next() to be true")
372
				actualSeries := merged.At()
373
				expectedSeries := tc.expected.At()
374
				require.Equal(t, expectedSeries.Labels(), actualSeries.Labels())
375

376
				expChks, expErr := ExpandChunks(expectedSeries.Iterator(nil))
377
				actChks, actErr := ExpandChunks(actualSeries.Iterator(nil))
378
				require.Equal(t, expErr, actErr)
379
				require.Equal(t, expChks, actChks)
380
			}
381
			require.NoError(t, merged.Err())
382
			require.False(t, tc.expected.Next(), "Expected Next() to be false")
383
		})
384
	}
385
}

func histogramSample(ts int64, hint histogram.CounterResetHint) hSample {
	h := tsdbutil.GenerateTestHistogram(int(ts + 1))
	h.CounterResetHint = hint
	return hSample{t: ts, h: h}
}

func floatHistogramSample(ts int64, hint histogram.CounterResetHint) fhSample {
	fh := tsdbutil.GenerateTestFloatHistogram(int(ts + 1))
	fh.CounterResetHint = hint
	return fhSample{t: ts, fh: fh}
}

// Shorthands for counter reset hints.
const (
	uk = histogram.UnknownCounterReset
	cr = histogram.CounterReset
	nr = histogram.NotCounterReset
	ga = histogram.GaugeType
)

func TestCompactingChunkSeriesMerger(t *testing.T) {
408
	m := NewCompactingChunkSeriesMerger(ChainedSeriesMerge)
409

410
	// histogramSample shadows the package-level helper, returning a histogram sample that is unique to the timestamp ts, with an unknown counter reset hint.
411
	histogramSample := func(ts int64) hSample {
412
		return histogramSample(ts, uk)
413
	}
414

415
	floatHistogramSample := func(ts int64) fhSample {
416
		return floatHistogramSample(ts, uk)
417
	}
418

419
	for _, tc := range []struct {
420
		name     string
421
		input    []ChunkSeries
422
		expected ChunkSeries
423
	}{
424
		{
425
			name: "single empty series",
426
			input: []ChunkSeries{
427
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), nil),
428
			},
429
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), nil),
430
		},
431
		{
432
			name: "single series",
433
			input: []ChunkSeries{
434
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
435
			},
436
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
437
		},
438
		{
439
			name: "two empty series",
440
			input: []ChunkSeries{
441
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), nil),
442
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), nil),
443
			},
444
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), nil),
445
		},
446
		{
447
			name: "two non overlapping",
448
			input: []ChunkSeries{
449
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{5, 5}}),
450
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
451
			},
452
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{5, 5}}, []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
453
		},
454
		{
455
			name: "two overlapping",
456
			input: []ChunkSeries{
457
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{8, 8}}),
458
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
459
			},
460
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{7, 7}, fSample{8, 8}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
461
		},
462
		{
463
			name: "two duplicated",
464
			input: []ChunkSeries{
465
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
466
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
467
			},
468
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
469
		},
470
		{
471
			name: "three overlapping",
472
			input: []ChunkSeries{
473
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
474
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{6, 6}}),
475
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{4, 4}}),
476
			},
477
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}, fSample{5, 5}, fSample{6, 6}}),
478
		},
479
		{
480
			name: "three in chained overlap",
481
			input: []ChunkSeries{
482
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
483
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{4, 4}, fSample{6, 66}}),
484
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{6, 6}, fSample{10, 10}}),
485
			},
486
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}, fSample{5, 5}, fSample{6, 66}, fSample{10, 10}}),
487
		},
488
		{
489
			name: "three in chained overlap complex",
490
			input: []ChunkSeries{
491
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{5, 5}}, []chunks.Sample{fSample{10, 10}, fSample{15, 15}}),
492
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{20, 20}}, []chunks.Sample{fSample{25, 25}, fSample{30, 30}}),
493
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{18, 18}, fSample{26, 26}}, []chunks.Sample{fSample{31, 31}, fSample{35, 35}}),
494
			},
495
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
496
				[]chunks.Sample{fSample{0, 0}, fSample{2, 2}, fSample{5, 5}, fSample{10, 10}, fSample{15, 15}, fSample{18, 18}, fSample{20, 20}, fSample{25, 25}, fSample{26, 26}, fSample{30, 30}},
497
				[]chunks.Sample{fSample{31, 31}, fSample{35, 35}},
498
			),
499
		},
500
		{
501
			name: "110 overlapping",
502
			input: []ChunkSeries{
503
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), chunks.GenerateSamples(0, 110)), // [0 - 110)
504
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), chunks.GenerateSamples(60, 50)), // [60 - 110)
505
			},
506
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
507
				chunks.GenerateSamples(0, 110),
508
			),
509
		},
510
		{
511
			name: "150 overlapping samples, split chunk",
512
			input: []ChunkSeries{
513
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), chunks.GenerateSamples(0, 90)),  // [0 - 90)
514
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), chunks.GenerateSamples(60, 90)), // [90 - 150)
515
			},
516
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
517
				chunks.GenerateSamples(0, 120),
518
				chunks.GenerateSamples(120, 30),
519
			),
520
		},
521
		{
522
			name: "histogram chunks overlapping",
523
			input: []ChunkSeries{
524
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{histogramSample(0), histogramSample(5)}, []chunks.Sample{histogramSample(10), histogramSample(15)}),
525
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{histogramSample(2), histogramSample(20)}, []chunks.Sample{histogramSample(25), histogramSample(30)}),
526
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{histogramSample(18), histogramSample(26)}, []chunks.Sample{histogramSample(31), histogramSample(35)}),
527
			},
528
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
529
				[]chunks.Sample{histogramSample(0), histogramSample(2), histogramSample(5), histogramSample(10), histogramSample(15), histogramSample(18), histogramSample(20), histogramSample(25), histogramSample(26), histogramSample(30)},
530
				[]chunks.Sample{histogramSample(31), histogramSample(35)},
531
			),
532
		},
533
		{
534
			name: "histogram chunks overlapping with float chunks",
535
			input: []ChunkSeries{
536
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{histogramSample(0), histogramSample(5)}, []chunks.Sample{histogramSample(10), histogramSample(15)}),
537
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{12, 12}}, []chunks.Sample{fSample{14, 14}}),
538
			},
539
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
540
				[]chunks.Sample{histogramSample(0)},
541
				[]chunks.Sample{fSample{1, 1}},
542
				[]chunks.Sample{histogramSample(5), histogramSample(10)},
543
				[]chunks.Sample{fSample{12, 12}, fSample{14, 14}},
544
				[]chunks.Sample{histogramSample(15)},
545
			),
546
		},
547
		{
548
			name: "float histogram chunks overlapping",
549
			input: []ChunkSeries{
550
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{floatHistogramSample(0), floatHistogramSample(5)}, []chunks.Sample{floatHistogramSample(10), floatHistogramSample(15)}),
551
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{floatHistogramSample(2), floatHistogramSample(20)}, []chunks.Sample{floatHistogramSample(25), floatHistogramSample(30)}),
552
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{floatHistogramSample(18), floatHistogramSample(26)}, []chunks.Sample{floatHistogramSample(31), floatHistogramSample(35)}),
553
			},
554
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
555
				[]chunks.Sample{floatHistogramSample(0), floatHistogramSample(2), floatHistogramSample(5), floatHistogramSample(10), floatHistogramSample(15), floatHistogramSample(18), floatHistogramSample(20), floatHistogramSample(25), floatHistogramSample(26), floatHistogramSample(30)},
556
				[]chunks.Sample{floatHistogramSample(31), floatHistogramSample(35)},
557
			),
558
		},
559
		{
560
			name: "float histogram chunks overlapping with float chunks",
561
			input: []ChunkSeries{
562
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{floatHistogramSample(0), floatHistogramSample(5)}, []chunks.Sample{floatHistogramSample(10), floatHistogramSample(15)}),
563
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{12, 12}}, []chunks.Sample{fSample{14, 14}}),
564
			},
565
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
566
				[]chunks.Sample{floatHistogramSample(0)},
567
				[]chunks.Sample{fSample{1, 1}},
568
				[]chunks.Sample{floatHistogramSample(5), floatHistogramSample(10)},
569
				[]chunks.Sample{fSample{12, 12}, fSample{14, 14}},
570
				[]chunks.Sample{floatHistogramSample(15)},
571
			),
572
		},
573
		{
574
			name: "float histogram chunks overlapping with histogram chunks",
575
			input: []ChunkSeries{
576
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{floatHistogramSample(0), floatHistogramSample(5)}, []chunks.Sample{floatHistogramSample(10), floatHistogramSample(15)}),
577
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{histogramSample(1), histogramSample(12)}, []chunks.Sample{histogramSample(14)}),
578
			},
579
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
580
				[]chunks.Sample{floatHistogramSample(0)},
581
				[]chunks.Sample{histogramSample(1)},
582
				[]chunks.Sample{floatHistogramSample(5), floatHistogramSample(10)},
583
				[]chunks.Sample{histogramSample(12), histogramSample(14)},
584
				[]chunks.Sample{floatHistogramSample(15)},
585
			),
586
		},
587
	} {
588
		t.Run(tc.name, func(t *testing.T) {
589
			merged := m(tc.input...)
590
			require.Equal(t, tc.expected.Labels(), merged.Labels())
591
			actChks, actErr := ExpandChunks(merged.Iterator(nil))
592
			expChks, expErr := ExpandChunks(tc.expected.Iterator(nil))
593

594
			require.Equal(t, expErr, actErr)
595
			require.Equal(t, expChks, actChks)
596

597
			actSamples := chunks.ChunkMetasToSamples(actChks)
598
			expSamples := chunks.ChunkMetasToSamples(expChks)
599
			require.Equal(t, expSamples, actSamples)
600
		})
601
	}
602
}
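
// compactMergeSketch is an illustrative, hypothetical helper (its name is an
// assumption; it is not referenced by any test). It shows the call shape used
// above: NewCompactingChunkSeriesMerger builds a merge function once, which is
// then applied to any number of ChunkSeries with identical labels, and the
// merged chunks are expanded for inspection.
func compactMergeSketch(series ...ChunkSeries) ([]chunks.Meta, error) {
	merge := NewCompactingChunkSeriesMerger(ChainedSeriesMerge)
	return ExpandChunks(merge(series...).Iterator(nil))
}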
603

604
func TestCompactingChunkSeriesMergerHistogramCounterResetHint(t *testing.T) {
605
	m := NewCompactingChunkSeriesMerger(ChainedSeriesMerge)
606

607
	for sampleType, sampleFunc := range map[string]func(int64, histogram.CounterResetHint) chunks.Sample{
608
		"histogram":       func(ts int64, hint histogram.CounterResetHint) chunks.Sample { return histogramSample(ts, hint) },
609
		"float histogram": func(ts int64, hint histogram.CounterResetHint) chunks.Sample { return floatHistogramSample(ts, hint) },
610
	} {
611
		for name, tc := range map[string]struct {
612
			input    []ChunkSeries
613
			expected ChunkSeries
614
		}{
615
			"histogram counter reset hint kept in single series": {
616
				input: []ChunkSeries{
617
					NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
618
						[]chunks.Sample{sampleFunc(0, cr), sampleFunc(5, uk)},
619
						[]chunks.Sample{sampleFunc(10, cr), sampleFunc(15, uk)},
620
					),
621
				},
622
				expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
623
					[]chunks.Sample{sampleFunc(0, cr), sampleFunc(5, uk)},
624
					[]chunks.Sample{sampleFunc(10, cr), sampleFunc(15, uk)},
625
				),
626
			},
627
			"histogram not counter reset hint kept in single series": {
628
				input: []ChunkSeries{
629
					NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
630
						[]chunks.Sample{sampleFunc(0, nr), sampleFunc(5, uk)},
631
						[]chunks.Sample{sampleFunc(10, nr), sampleFunc(15, uk)},
632
					),
633
				},
634
				expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
635
					[]chunks.Sample{sampleFunc(0, nr), sampleFunc(5, uk)},
636
					[]chunks.Sample{sampleFunc(10, nr), sampleFunc(15, uk)},
637
				),
638
			},
639
			"histogram counter reset hint kept in multiple equal series": {
640
				input: []ChunkSeries{
641
					NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
642
						[]chunks.Sample{sampleFunc(0, cr), sampleFunc(5, uk)},
643
						[]chunks.Sample{sampleFunc(10, cr), sampleFunc(15, uk)},
644
					),
645
					NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
646
						[]chunks.Sample{sampleFunc(0, cr), sampleFunc(5, uk)},
647
						[]chunks.Sample{sampleFunc(10, cr), sampleFunc(15, uk)},
648
					),
649
				},
650
				expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
651
					[]chunks.Sample{sampleFunc(0, cr), sampleFunc(5, uk)},
652
					[]chunks.Sample{sampleFunc(10, cr), sampleFunc(15, uk)},
653
				),
654
			},
655
			"histogram not counter reset hint kept in multiple equal series": {
656
				input: []ChunkSeries{
657
					NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
658
						[]chunks.Sample{sampleFunc(0, nr), sampleFunc(5, uk)},
659
						[]chunks.Sample{sampleFunc(10, nr), sampleFunc(15, uk)},
660
					),
661
					NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
662
						[]chunks.Sample{sampleFunc(0, nr), sampleFunc(5, uk)},
663
						[]chunks.Sample{sampleFunc(10, nr), sampleFunc(15, uk)},
664
					),
665
				},
666
				expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
667
					[]chunks.Sample{sampleFunc(0, nr), sampleFunc(5, uk)},
668
					[]chunks.Sample{sampleFunc(10, nr), sampleFunc(15, uk)},
669
				),
670
			},
671
			"histogram counter reset hint dropped from differing series": {
672
				input: []ChunkSeries{
673
					NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
674
						[]chunks.Sample{sampleFunc(0, cr), sampleFunc(5, uk)},
675
						[]chunks.Sample{sampleFunc(10, cr), sampleFunc(15, uk)},
676
					),
677
					NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
678
						[]chunks.Sample{sampleFunc(0, cr), sampleFunc(5, uk)},
679
						[]chunks.Sample{sampleFunc(10, cr), sampleFunc(12, uk), sampleFunc(15, uk)},
680
					),
681
				},
682
				expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
683
					[]chunks.Sample{sampleFunc(0, cr), sampleFunc(5, uk)},
684
					[]chunks.Sample{sampleFunc(10, uk), sampleFunc(12, uk), sampleFunc(15, uk)},
685
				),
686
			},
687
			"histogram counter not reset hint dropped from differing series": {
688
				input: []ChunkSeries{
689
					NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
690
						[]chunks.Sample{sampleFunc(0, nr), sampleFunc(5, uk)},
691
						[]chunks.Sample{sampleFunc(10, nr), sampleFunc(15, uk)},
692
					),
693
					NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
694
						[]chunks.Sample{sampleFunc(0, nr), sampleFunc(5, uk)},
695
						[]chunks.Sample{sampleFunc(10, nr), sampleFunc(12, uk), sampleFunc(15, uk)},
696
					),
697
				},
698
				expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
699
					[]chunks.Sample{sampleFunc(0, nr), sampleFunc(5, uk)},
700
					[]chunks.Sample{sampleFunc(10, uk), sampleFunc(12, uk), sampleFunc(15, uk)},
701
				),
702
			},
703
		} {
704
			t.Run(sampleType+"/"+name, func(t *testing.T) {
705
				merged := m(tc.input...)
706
				require.Equal(t, tc.expected.Labels(), merged.Labels())
707
				actChks, actErr := ExpandChunks(merged.Iterator(nil))
708
				expChks, expErr := ExpandChunks(tc.expected.Iterator(nil))
709

710
				require.Equal(t, expErr, actErr)
711
				require.Equal(t, expChks, actChks)
712

713
				actSamples := chunks.ChunkMetasToSamples(actChks)
714
				expSamples := chunks.ChunkMetasToSamples(expChks)
715
				require.Equal(t, expSamples, actSamples)
716
			})
717
		}
718
	}
719
}
720

721
func TestConcatenatingChunkSeriesMerger(t *testing.T) {
722
	m := NewConcatenatingChunkSeriesMerger()
723

724
	for _, tc := range []struct {
725
		name     string
726
		input    []ChunkSeries
727
		expected ChunkSeries
728
	}{
729
		{
730
			name: "single empty series",
731
			input: []ChunkSeries{
732
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), nil),
733
			},
734
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), nil),
735
		},
736
		{
737
			name: "single series",
738
			input: []ChunkSeries{
739
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
740
			},
741
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}}),
742
		},
743
		{
744
			name: "two empty series",
745
			input: []ChunkSeries{
746
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), nil),
747
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), nil),
748
			},
749
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), nil, nil),
750
		},
751
		{
752
			name: "two non overlapping",
753
			input: []ChunkSeries{
754
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{5, 5}}),
755
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
756
			},
757
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{5, 5}}, []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
758
		},
759
		{
760
			name: "two overlapping",
761
			input: []ChunkSeries{
762
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{8, 8}}),
763
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}}),
764
			},
765
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
766
				[]chunks.Sample{fSample{1, 1}, fSample{2, 2}}, []chunks.Sample{fSample{3, 3}, fSample{8, 8}},
767
				[]chunks.Sample{fSample{7, 7}, fSample{9, 9}}, []chunks.Sample{fSample{10, 10}},
768
			),
769
		},
770
		{
771
			name: "two duplicated",
772
			input: []ChunkSeries{
773
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
774
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
775
			},
776
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
777
				[]chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}},
778
				[]chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{5, 5}},
779
			),
780
		},
781
		{
782
			name: "three overlapping",
783
			input: []ChunkSeries{
784
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
785
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{6, 6}}),
786
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{4, 4}}),
787
			},
788
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
789
				[]chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}},
790
				[]chunks.Sample{fSample{2, 2}, fSample{3, 3}, fSample{6, 6}},
791
				[]chunks.Sample{fSample{0, 0}, fSample{4, 4}},
792
			),
793
		},
794
		{
795
			name: "three in chained overlap",
796
			input: []ChunkSeries{
797
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
798
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{4, 4}, fSample{6, 66}}),
799
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{6, 6}, fSample{10, 10}}),
800
			},
801
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
802
				[]chunks.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}},
803
				[]chunks.Sample{fSample{4, 4}, fSample{6, 66}},
804
				[]chunks.Sample{fSample{6, 6}, fSample{10, 10}},
805
			),
806
		},
807
		{
808
			name: "three in chained overlap complex",
809
			input: []ChunkSeries{
810
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{0, 0}, fSample{5, 5}}, []chunks.Sample{fSample{10, 10}, fSample{15, 15}}),
811
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{2, 2}, fSample{20, 20}}, []chunks.Sample{fSample{25, 25}, fSample{30, 30}}),
812
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []chunks.Sample{fSample{18, 18}, fSample{26, 26}}, []chunks.Sample{fSample{31, 31}, fSample{35, 35}}),
813
			},
814
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
815
				[]chunks.Sample{fSample{0, 0}, fSample{5, 5}}, []chunks.Sample{fSample{10, 10}, fSample{15, 15}},
816
				[]chunks.Sample{fSample{2, 2}, fSample{20, 20}}, []chunks.Sample{fSample{25, 25}, fSample{30, 30}},
817
				[]chunks.Sample{fSample{18, 18}, fSample{26, 26}}, []chunks.Sample{fSample{31, 31}, fSample{35, 35}},
818
			),
819
		},
820
		{
821
			name: "110 overlapping",
822
			input: []ChunkSeries{
823
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), chunks.GenerateSamples(0, 110)), // [0 - 110)
824
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), chunks.GenerateSamples(60, 50)), // [60 - 110)
825
			},
826
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
827
				chunks.GenerateSamples(0, 110),
828
				chunks.GenerateSamples(60, 50),
829
			),
830
		},
831
		{
832
			name: "150 overlapping samples, simply concatenated and no splits",
833
			input: []ChunkSeries{
834
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), chunks.GenerateSamples(0, 90)),  // [0 - 90)
835
				NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), chunks.GenerateSamples(60, 90)), // [90 - 150)
836
			},
837
			expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
838
				chunks.GenerateSamples(0, 90),
839
				chunks.GenerateSamples(60, 90),
840
			),
841
		},
842
	} {
843
		t.Run(tc.name, func(t *testing.T) {
844
			merged := m(tc.input...)
845
			require.Equal(t, tc.expected.Labels(), merged.Labels())
846
			actChks, actErr := ExpandChunks(merged.Iterator(nil))
847
			expChks, expErr := ExpandChunks(tc.expected.Iterator(nil))
848

849
			require.Equal(t, expErr, actErr)
850
			require.Equal(t, expChks, actChks)
851
		})
852
	}
853
}
854

855
func TestConcatenatingChunkIterator(t *testing.T) {
856
	chunk1, err := chunks.ChunkFromSamples([]chunks.Sample{fSample{t: 1, f: 10}})
857
	require.NoError(t, err)
858
	chunk2, err := chunks.ChunkFromSamples([]chunks.Sample{fSample{t: 2, f: 20}})
859
	require.NoError(t, err)
860
	chunk3, err := chunks.ChunkFromSamples([]chunks.Sample{fSample{t: 3, f: 30}})
861
	require.NoError(t, err)
862

863
	testError := errors.New("something went wrong")
864

865
	testCases := map[string]struct {
866
		iterators      []chunks.Iterator
867
		expectedChunks []chunks.Meta
868
		expectedError  error
869
	}{
870
		"many successful iterators": {
871
			iterators: []chunks.Iterator{
872
				NewListChunkSeriesIterator(chunk1, chunk2),
873
				NewListChunkSeriesIterator(chunk3),
874
			},
875
			expectedChunks: []chunks.Meta{chunk1, chunk2, chunk3},
876
		},
877
		"single failing iterator": {
878
			iterators: []chunks.Iterator{
879
				errChunksIterator{err: testError},
880
			},
881
			expectedError: testError,
882
		},
883
		"some failing and some successful iterators": {
884
			iterators: []chunks.Iterator{
885
				NewListChunkSeriesIterator(chunk1, chunk2),
886
				errChunksIterator{err: testError},
887
				NewListChunkSeriesIterator(chunk3),
888
			},
889
			expectedChunks: []chunks.Meta{chunk1, chunk2}, // Should stop before advancing to last iterator.
890
			expectedError:  testError,
891
		},
892
	}
893

894
	for name, testCase := range testCases {
895
		t.Run(name, func(t *testing.T) {
896
			it := concatenatingChunkIterator{iterators: testCase.iterators}
897
			var chks []chunks.Meta
898

899
			for it.Next() {
900
				chks = append(chks, it.At())
901
			}
902

903
			require.Equal(t, testCase.expectedChunks, chks)
904

905
			if testCase.expectedError == nil {
906
				require.NoError(t, it.Err())
907
			} else {
908
				require.EqualError(t, it.Err(), testCase.expectedError.Error())
909
			}
910
		})
911
	}
912
}
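
// mockQuerier is a Querier test double: Select returns the configured series,
// sorted by label set when sortSeries is true.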
type mockQuerier struct {
	LabelQuerier

	toReturn []Series
}

type seriesByLabel []Series

func (a seriesByLabel) Len() int           { return len(a) }
func (a seriesByLabel) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a seriesByLabel) Less(i, j int) bool { return labels.Compare(a[i].Labels(), a[j].Labels()) < 0 }

func (m *mockQuerier) Select(_ context.Context, sortSeries bool, _ *SelectHints, _ ...*labels.Matcher) SeriesSet {
	cpy := make([]Series, len(m.toReturn))
	copy(cpy, m.toReturn)
	if sortSeries {
		sort.Sort(seriesByLabel(cpy))
	}

	return NewMockSeriesSet(cpy...)
}

type mockChunkQuerier struct {
	LabelQuerier

	toReturn []ChunkSeries
}

type chunkSeriesByLabel []ChunkSeries

func (a chunkSeriesByLabel) Len() int      { return len(a) }
func (a chunkSeriesByLabel) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a chunkSeriesByLabel) Less(i, j int) bool {
	return labels.Compare(a[i].Labels(), a[j].Labels()) < 0
}

func (m *mockChunkQuerier) Select(_ context.Context, sortSeries bool, _ *SelectHints, _ ...*labels.Matcher) ChunkSeriesSet {
	cpy := make([]ChunkSeries, len(m.toReturn))
	copy(cpy, m.toReturn)
	if sortSeries {
		sort.Sort(chunkSeriesByLabel(cpy))
	}

	return NewMockChunkSeriesSet(cpy...)
}

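// mockSeriesSet is a fixed, in-memory SeriesSet; NewMockSeriesSet is used
// throughout these tests to describe the expected merge results.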
type mockSeriesSet struct {
	idx    int
	series []Series
}

func NewMockSeriesSet(series ...Series) SeriesSet {
	return &mockSeriesSet{
		idx:    -1,
		series: series,
	}
}

func (m *mockSeriesSet) Next() bool {
	m.idx++
	return m.idx < len(m.series)
}

func (m *mockSeriesSet) At() Series { return m.series[m.idx] }

func (m *mockSeriesSet) Err() error { return nil }

func (m *mockSeriesSet) Warnings() annotations.Annotations { return nil }

type mockChunkSeriesSet struct {
	idx    int
	series []ChunkSeries
}

func NewMockChunkSeriesSet(series ...ChunkSeries) ChunkSeriesSet {
	return &mockChunkSeriesSet{
		idx:    -1,
		series: series,
	}
}

func (m *mockChunkSeriesSet) Next() bool {
	m.idx++
	return m.idx < len(m.series)
}

func (m *mockChunkSeriesSet) At() ChunkSeries { return m.series[m.idx] }

func (m *mockChunkSeriesSet) Err() error { return nil }

func (m *mockChunkSeriesSet) Warnings() annotations.Annotations { return nil }

func TestChainSampleIterator(t *testing.T) {
1007
	for sampleType, sampleFunc := range map[string]func(int64) chunks.Sample{
1008
		"float":           func(ts int64) chunks.Sample { return fSample{ts, float64(ts)} },
1009
		"histogram":       func(ts int64) chunks.Sample { return histogramSample(ts, uk) },
1010
		"float histogram": func(ts int64) chunks.Sample { return floatHistogramSample(ts, uk) },
1011
	} {
1012
		for name, tc := range map[string]struct {
1013
			input    []chunkenc.Iterator
1014
			expected []chunks.Sample
1015
		}{
1016
			"single iterator": {
1017
				input: []chunkenc.Iterator{
1018
					NewListSeriesIterator(samples{sampleFunc(0), sampleFunc(1)}),
1019
				},
1020
				expected: []chunks.Sample{sampleFunc(0), sampleFunc(1)},
1021
			},
1022
			"non overlapping iterators": {
1023
				input: []chunkenc.Iterator{
1024
					NewListSeriesIterator(samples{sampleFunc(0), sampleFunc(1)}),
1025
					NewListSeriesIterator(samples{sampleFunc(2), sampleFunc(3)}),
1026
				},
1027
				expected: []chunks.Sample{sampleFunc(0), sampleFunc(1), sampleFunc(2), sampleFunc(3)},
1028
			},
1029
			"overlapping but distinct iterators": {
1030
				input: []chunkenc.Iterator{
1031
					NewListSeriesIterator(samples{sampleFunc(0), sampleFunc(3)}),
1032
					NewListSeriesIterator(samples{sampleFunc(1), sampleFunc(4)}),
1033
					NewListSeriesIterator(samples{sampleFunc(2), sampleFunc(5)}),
1034
				},
1035
				expected: []chunks.Sample{
1036
					sampleFunc(0), sampleFunc(1), sampleFunc(2), sampleFunc(3), sampleFunc(4), sampleFunc(5),
1037
				},
1038
			},
1039
			"overlapping iterators": {
1040
				input: []chunkenc.Iterator{
1041
					NewListSeriesIterator(samples{sampleFunc(0), sampleFunc(1)}),
1042
					NewListSeriesIterator(samples{sampleFunc(0), sampleFunc(2)}),
1043
					NewListSeriesIterator(samples{sampleFunc(2), sampleFunc(3)}),
1044
					NewListSeriesIterator(samples{}),
1045
					NewListSeriesIterator(samples{}),
1046
					NewListSeriesIterator(samples{}),
1047
				},
1048
				expected: []chunks.Sample{sampleFunc(0), sampleFunc(1), sampleFunc(2), sampleFunc(3)},
1049
			},
1050
		} {
1051
			t.Run(sampleType+"/"+name, func(t *testing.T) {
1052
				merged := ChainSampleIteratorFromIterators(nil, tc.input)
1053
				actual, err := ExpandSamples(merged, nil)
1054
				require.NoError(t, err)
1055
				require.Equal(t, tc.expected, actual)
1056
			})
1057
		}
1058
	}
1059
}
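
// chainSampleSketch is an illustrative, hypothetical helper (its name is an
// assumption; it is not used by the tests). It shows the minimal
// ChainSampleIteratorFromIterators usage exercised above: several sample
// iterators are merged into one time-ordered iterator, which is then expanded
// into a flat sample slice.
func chainSampleSketch(its ...chunkenc.Iterator) ([]chunks.Sample, error) {
	return ExpandSamples(ChainSampleIteratorFromIterators(nil, its), nil)
}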
1060

1061
func TestChainSampleIteratorHistogramCounterResetHint(t *testing.T) {
1062
	for sampleType, sampleFunc := range map[string]func(int64, histogram.CounterResetHint) chunks.Sample{
1063
		"histogram":       func(ts int64, hint histogram.CounterResetHint) chunks.Sample { return histogramSample(ts, hint) },
1064
		"float histogram": func(ts int64, hint histogram.CounterResetHint) chunks.Sample { return floatHistogramSample(ts, hint) },
1065
	} {
1066
		for name, tc := range map[string]struct {
1067
			input    []chunkenc.Iterator
1068
			expected []chunks.Sample
1069
		}{
1070
			"single iterator": {
1071
				input: []chunkenc.Iterator{
1072
					NewListSeriesIterator(samples{sampleFunc(0, cr), sampleFunc(1, cr), sampleFunc(2, uk)}),
1073
				},
1074
				expected: []chunks.Sample{sampleFunc(0, uk), sampleFunc(1, cr), sampleFunc(2, uk)},
1075
			},
1076
			"single iterator gauge": {
1077
				input: []chunkenc.Iterator{
1078
					NewListSeriesIterator(samples{sampleFunc(0, ga), sampleFunc(1, ga), sampleFunc(2, ga)}),
1079
				},
1080
				expected: []chunks.Sample{sampleFunc(0, ga), sampleFunc(1, ga), sampleFunc(2, ga)},
1081
			},
1082
			"overlapping iterators gauge": {
1083
				input: []chunkenc.Iterator{
1084
					NewListSeriesIterator(samples{sampleFunc(0, ga), sampleFunc(1, ga), sampleFunc(2, ga), sampleFunc(4, ga)}),
1085
					NewListSeriesIterator(samples{sampleFunc(0, ga), sampleFunc(1, ga), sampleFunc(3, ga), sampleFunc(5, ga)}),
1086
				},
1087
				expected: []chunks.Sample{sampleFunc(0, ga), sampleFunc(1, ga), sampleFunc(2, ga), sampleFunc(3, ga), sampleFunc(4, ga), sampleFunc(5, ga)},
1088
			},
1089
			"non overlapping iterators": {
1090
				input: []chunkenc.Iterator{
1091
					NewListSeriesIterator(samples{sampleFunc(0, cr), sampleFunc(1, uk)}),
1092
					NewListSeriesIterator(samples{sampleFunc(2, cr), sampleFunc(3, cr)}),
1093
				},
1094
				expected: []chunks.Sample{sampleFunc(0, uk), sampleFunc(1, uk), sampleFunc(2, uk), sampleFunc(3, cr)},
1095
			},
1096
			"overlapping but distinct iterators": {
1097
				input: []chunkenc.Iterator{
1098
					NewListSeriesIterator(samples{sampleFunc(0, cr), sampleFunc(3, uk), sampleFunc(5, cr)}),
1099
					NewListSeriesIterator(samples{sampleFunc(1, uk), sampleFunc(2, cr), sampleFunc(4, cr)}),
1100
				},
1101
				expected: []chunks.Sample{
1102
					sampleFunc(0, uk), sampleFunc(1, uk), sampleFunc(2, cr), sampleFunc(3, uk), sampleFunc(4, uk), sampleFunc(5, uk),
1103
				},
1104
			},
1105
			"overlapping iterators": {
1106
				input: []chunkenc.Iterator{
1107
					NewListSeriesIterator(samples{sampleFunc(0, cr), sampleFunc(1, cr), sampleFunc(2, cr)}),
1108
					NewListSeriesIterator(samples{sampleFunc(0, cr), sampleFunc(1, cr), sampleFunc(2, cr)}),
1109
				},
1110
				expected: []chunks.Sample{sampleFunc(0, uk), sampleFunc(1, uk), sampleFunc(2, uk)},
1111
			},
1112
		} {
1113
			t.Run(sampleType+"/"+name, func(t *testing.T) {
1114
				merged := ChainSampleIteratorFromIterators(nil, tc.input)
1115
				actual, err := ExpandSamples(merged, nil)
1116
				require.NoError(t, err)
1117
				require.Equal(t, tc.expected, actual)
1118
			})
1119
		}
1120
	}
1121
}
1122

1123
func TestChainSampleIteratorSeek(t *testing.T) {
1124
	for sampleType, sampleFunc := range map[string]func(int64) chunks.Sample{
1125
		"float":           func(ts int64) chunks.Sample { return fSample{ts, float64(ts)} },
1126
		"histogram":       func(ts int64) chunks.Sample { return histogramSample(ts, uk) },
1127
		"float histogram": func(ts int64) chunks.Sample { return floatHistogramSample(ts, uk) },
1128
	} {
1129
		for name, tc := range map[string]struct {
1130
			input    []chunkenc.Iterator
1131
			seek     int64
1132
			expected []chunks.Sample
1133
		}{
1134
			"single iterator": {
1135
				input: []chunkenc.Iterator{
1136
					NewListSeriesIterator(samples{sampleFunc(0), sampleFunc(1), sampleFunc(2)}),
1137
				},
1138
				seek:     1,
1139
				expected: []chunks.Sample{sampleFunc(1), sampleFunc(2)},
1140
			},
1141
			"non overlapping iterators": {
1142
				input: []chunkenc.Iterator{
1143
					NewListSeriesIterator(samples{sampleFunc(0), sampleFunc(1)}),
1144
					NewListSeriesIterator(samples{sampleFunc(2), sampleFunc(3)}),
1145
				},
1146
				seek:     2,
1147
				expected: []chunks.Sample{sampleFunc(2), sampleFunc(3)},
1148
			},
1149
			"overlapping but distinct iterators": {
1150
				input: []chunkenc.Iterator{
1151
					NewListSeriesIterator(samples{sampleFunc(0), sampleFunc(3)}),
1152
					NewListSeriesIterator(samples{sampleFunc(1), sampleFunc(4)}),
1153
					NewListSeriesIterator(samples{sampleFunc(2), sampleFunc(5)}),
1154
				},
1155
				seek:     2,
1156
				expected: []chunks.Sample{sampleFunc(2), sampleFunc(3), sampleFunc(4), sampleFunc(5)},
1157
			},
1158
			"overlapping iterators": {
1159
				input: []chunkenc.Iterator{
1160
					NewListSeriesIterator(samples{sampleFunc(0), sampleFunc(2), sampleFunc(3)}),
1161
					NewListSeriesIterator(samples{sampleFunc(0), sampleFunc(1), sampleFunc(2)}),
1162
				},
1163
				seek:     0,
1164
				expected: []chunks.Sample{sampleFunc(0), sampleFunc(1), sampleFunc(2), sampleFunc(3)},
1165
			},
1166
		} {
1167
			t.Run(sampleType+"/"+name, func(t *testing.T) {
1168
				merged := ChainSampleIteratorFromIterators(nil, tc.input)
1169
				actual := []chunks.Sample{}
1170
				switch merged.Seek(tc.seek) {
1171
				case chunkenc.ValFloat:
1172
					t, f := merged.At()
1173
					actual = append(actual, fSample{t, f})
1174
				case chunkenc.ValHistogram:
1175
					t, h := merged.AtHistogram(nil)
1176
					actual = append(actual, hSample{t, h})
1177
				case chunkenc.ValFloatHistogram:
1178
					t, fh := merged.AtFloatHistogram(nil)
1179
					actual = append(actual, fhSample{t, fh})
1180
				}
1181
				s, err := ExpandSamples(merged, nil)
1182
				require.NoError(t, err)
1183
				actual = append(actual, s...)
1184
				require.Equal(t, tc.expected, actual)
1185
			})
1186
		}
1187
	}
1188
}
1189

1190
func TestChainSampleIteratorSeekFailingIterator(t *testing.T) {
	merged := ChainSampleIteratorFromIterators(nil, []chunkenc.Iterator{
		NewListSeriesIterator(samples{fSample{0, 0.1}, fSample{1, 1.1}, fSample{2, 2.1}}),
		errIterator{errors.New("something went wrong")},
	})

	require.Equal(t, chunkenc.ValNone, merged.Seek(0))
	require.EqualError(t, merged.Err(), "something went wrong")
}

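// TestChainSampleIteratorNextImmediatelyFailingIterator verifies that Next returns
// ValNone and surfaces the underlying error, whether the failing iterator is chained
// first or last.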
func TestChainSampleIteratorNextImmediatelyFailingIterator(t *testing.T) {
	merged := ChainSampleIteratorFromIterators(nil, []chunkenc.Iterator{
		NewListSeriesIterator(samples{fSample{0, 0.1}, fSample{1, 1.1}, fSample{2, 2.1}}),
		errIterator{errors.New("something went wrong")},
	})

	require.Equal(t, chunkenc.ValNone, merged.Next())
	require.EqualError(t, merged.Err(), "something went wrong")

	// Next() does some special handling for the first iterator, so make sure it handles the first iterator returning an error too.
	merged = ChainSampleIteratorFromIterators(nil, []chunkenc.Iterator{
		errIterator{errors.New("something went wrong")},
		NewListSeriesIterator(samples{fSample{0, 0.1}, fSample{1, 1.1}, fSample{2, 2.1}}),
	})

	require.Equal(t, chunkenc.ValNone, merged.Next())
	require.EqualError(t, merged.Err(), "something went wrong")
}

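// TestChainSampleIteratorSeekHistogramCounterResetHint checks that the first
// (float) histogram sample returned after a Seek is reported with an unknown
// counter reset hint.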
func TestChainSampleIteratorSeekHistogramCounterResetHint(t *testing.T) {
	for sampleType, sampleFunc := range map[string]func(int64, histogram.CounterResetHint) chunks.Sample{
		"histogram":       func(ts int64, hint histogram.CounterResetHint) chunks.Sample { return histogramSample(ts, hint) },
		"float histogram": func(ts int64, hint histogram.CounterResetHint) chunks.Sample { return floatHistogramSample(ts, hint) },
	} {
		for name, tc := range map[string]struct {
			input    []chunkenc.Iterator
			seek     int64
			expected []chunks.Sample
		}{
			"single iterator": {
				input: []chunkenc.Iterator{
					NewListSeriesIterator(samples{sampleFunc(0, cr), sampleFunc(1, cr), sampleFunc(2, uk)}),
				},
				seek:     1,
				expected: []chunks.Sample{sampleFunc(1, uk), sampleFunc(2, uk)},
			},
			"non overlapping iterators": {
				input: []chunkenc.Iterator{
					NewListSeriesIterator(samples{sampleFunc(0, cr), sampleFunc(1, uk)}),
					NewListSeriesIterator(samples{sampleFunc(2, cr), sampleFunc(3, cr)}),
				},
				seek:     2,
				expected: []chunks.Sample{sampleFunc(2, uk), sampleFunc(3, cr)},
			},
			"non overlapping iterators seek to internal reset": {
				input: []chunkenc.Iterator{
					NewListSeriesIterator(samples{sampleFunc(0, cr), sampleFunc(1, uk)}),
					NewListSeriesIterator(samples{sampleFunc(2, cr), sampleFunc(3, cr)}),
				},
				seek:     3,
				expected: []chunks.Sample{sampleFunc(3, uk)},
			},
		} {
			t.Run(sampleType+"/"+name, func(t *testing.T) {
				merged := ChainSampleIteratorFromIterators(nil, tc.input)
				actual := []chunks.Sample{}
				switch merged.Seek(tc.seek) {
				case chunkenc.ValFloat:
					t, f := merged.At()
					actual = append(actual, fSample{t, f})
				case chunkenc.ValHistogram:
					t, h := merged.AtHistogram(nil)
					actual = append(actual, hSample{t, h})
				case chunkenc.ValFloatHistogram:
					t, fh := merged.AtFloatHistogram(nil)
					actual = append(actual, fhSample{t, fh})
				}
				s, err := ExpandSamples(merged, nil)
				require.NoError(t, err)
				actual = append(actual, s...)
				require.Equal(t, tc.expected, actual)
			})
		}
	}
}

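// makeSeries returns numSeries series labelled foo="bar<j>", each holding numSamples
// float samples whose timestamp and value both equal the sample index.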
func makeSeries(numSeries, numSamples int) []Series {
	series := []Series{}
	for j := 0; j < numSeries; j++ {
		labels := labels.FromStrings("foo", fmt.Sprintf("bar%d", j))
		samples := []chunks.Sample{}
		for k := 0; k < numSamples; k++ {
			samples = append(samples, fSample{t: int64(k), f: float64(k)})
		}
		series = append(series, NewListSeries(labels, samples))
	}
	return series
}

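// makeMergeSeriesSet wraps each input slice in a mock series set and merges them
// vertically using ChainedSeriesMerge.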
func makeMergeSeriesSet(serieses [][]Series) SeriesSet {
	seriesSets := make([]genericSeriesSet, len(serieses))
	for i, s := range serieses {
		seriesSets[i] = &genericSeriesSetAdapter{NewMockSeriesSet(s...)}
	}
	return &seriesSetAdapter{newGenericMergeSeriesSet(seriesSets, (&seriesMergerAdapter{VerticalSeriesMergeFunc: ChainedSeriesMerge}).Merge)}
}

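// benchmarkDrain builds a fresh SeriesSet on every benchmark iteration and drains
// all float samples of every series in it.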
func benchmarkDrain(b *testing.B, makeSeriesSet func() SeriesSet) {
	var err error
	var t int64
	var v float64
	var iter chunkenc.Iterator
	for n := 0; n < b.N; n++ {
		seriesSet := makeSeriesSet()
		for seriesSet.Next() {
			iter = seriesSet.At().Iterator(iter)
			for iter.Next() == chunkenc.ValFloat {
				t, v = iter.At()
			}
			err = iter.Err()
		}
		require.NoError(b, err)
		require.NotEqual(b, t, v) // To ensure the inner loop doesn't get optimised away.
	}
}

func BenchmarkNoMergeSeriesSet_100_100(b *testing.B) {
	series := makeSeries(100, 100)
	benchmarkDrain(b, func() SeriesSet { return NewMockSeriesSet(series...) })
}

func BenchmarkMergeSeriesSet(b *testing.B) {
	for _, bm := range []struct {
		numSeriesSets, numSeries, numSamples int
	}{
		{1, 100, 100},
		{10, 100, 100},
		{100, 100, 100},
	} {
		serieses := [][]Series{}
		for i := 0; i < bm.numSeriesSets; i++ {
			serieses = append(serieses, makeSeries(bm.numSeries, bm.numSamples))
		}
		b.Run(fmt.Sprintf("%d_%d_%d", bm.numSeriesSets, bm.numSeries, bm.numSamples), func(b *testing.B) {
			benchmarkDrain(b, func() SeriesSet { return makeMergeSeriesSet(serieses) })
		})
	}
}

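// mockGenericQuerier records the calls it receives and answers Select, LabelValues
// and LabelNames with the canned resp, warnings and err.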
type mockGenericQuerier struct {
	mtx sync.Mutex

	closed                bool
	labelNamesCalls       int
	labelNamesRequested   []labelNameRequest
	sortedSeriesRequested []bool

	resp     []string
	warnings annotations.Annotations
	err      error
}

type labelNameRequest struct {
	name     string
	matchers []*labels.Matcher
}

func (m *mockGenericQuerier) Select(_ context.Context, b bool, _ *SelectHints, _ ...*labels.Matcher) genericSeriesSet {
	m.mtx.Lock()
	m.sortedSeriesRequested = append(m.sortedSeriesRequested, b)
	m.mtx.Unlock()
	return &mockGenericSeriesSet{resp: m.resp, warnings: m.warnings, err: m.err}
}

func (m *mockGenericQuerier) LabelValues(_ context.Context, name string, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) {
	m.mtx.Lock()
	m.labelNamesRequested = append(m.labelNamesRequested, labelNameRequest{
		name:     name,
		matchers: matchers,
	})
	m.mtx.Unlock()
	return m.resp, m.warnings, m.err
}

func (m *mockGenericQuerier) LabelNames(context.Context, ...*labels.Matcher) ([]string, annotations.Annotations, error) {
	m.mtx.Lock()
	m.labelNamesCalls++
	m.mtx.Unlock()
	return m.resp, m.warnings, m.err
}

func (m *mockGenericQuerier) Close() error {
	m.closed = true
	return nil
}

type mockGenericSeriesSet struct {
	resp     []string
	warnings annotations.Annotations
	err      error

	curr int
}

func (m *mockGenericSeriesSet) Next() bool {
	if m.err != nil {
		return false
	}
	if m.curr >= len(m.resp) {
		return false
	}
	m.curr++
	return true
}

func (m *mockGenericSeriesSet) Err() error                        { return m.err }
func (m *mockGenericSeriesSet) Warnings() annotations.Annotations { return m.warnings }

func (m *mockGenericSeriesSet) At() Labels {
	return mockLabels(m.resp[m.curr-1])
}

type mockLabels string

func (l mockLabels) Labels() labels.Labels {
	return labels.FromStrings("test", string(l))
}

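// unwrapMockGenericQuerier returns the mockGenericQuerier behind qr, unwrapping a
// secondaryQuerier if necessary.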
func unwrapMockGenericQuerier(t *testing.T, qr genericQuerier) *mockGenericQuerier {
	m, ok := qr.(*mockGenericQuerier)
	if !ok {
		s, ok := qr.(*secondaryQuerier)
		require.True(t, ok, "expected secondaryQuerier got something else")
		m, ok = s.genericQuerier.(*mockGenericQuerier)
		require.True(t, ok, "expected mockGenericQuerier got something else")
	}
	return m
}

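// TestMergeGenericQuerierWithSecondaries_ErrorHandling exercises Select, LabelNames
// and LabelValues on mergeGenericQuerier, expecting errors from the primary querier
// to fail the call while errors from secondary queriers surface only as warnings.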
func TestMergeGenericQuerierWithSecondaries_ErrorHandling(t *testing.T) {
	var (
		errStorage  = errors.New("storage error")
		warnStorage = errors.New("storage warning")
		ctx         = context.Background()
	)
	for _, tcase := range []struct {
		name     string
		queriers []genericQuerier

		expectedSelectsSeries []labels.Labels
		expectedLabels        []string

		expectedWarnings annotations.Annotations
		expectedErrs     [4]error
	}{
		{
			name:     "one successful primary querier",
			queriers: []genericQuerier{&mockGenericQuerier{resp: []string{"a", "b"}, warnings: nil, err: nil}},
			expectedSelectsSeries: []labels.Labels{
				labels.FromStrings("test", "a"),
				labels.FromStrings("test", "b"),
			},
			expectedLabels: []string{"a", "b"},
		},
		{
			name: "multiple successful primary queriers",
			queriers: []genericQuerier{
				&mockGenericQuerier{resp: []string{"a", "b"}, warnings: nil, err: nil},
				&mockGenericQuerier{resp: []string{"b", "c"}, warnings: nil, err: nil},
			},
			expectedSelectsSeries: []labels.Labels{
				labels.FromStrings("test", "a"),
				labels.FromStrings("test", "b"),
				labels.FromStrings("test", "c"),
			},
			expectedLabels: []string{"a", "b", "c"},
		},
		{
			name:         "one failed primary querier",
			queriers:     []genericQuerier{&mockGenericQuerier{warnings: nil, err: errStorage}},
			expectedErrs: [4]error{errStorage, errStorage, errStorage, errStorage},
		},
		{
			name: "one successful primary querier with successful secondaries",
			queriers: []genericQuerier{
				&mockGenericQuerier{resp: []string{"a", "b"}, warnings: nil, err: nil},
				&secondaryQuerier{genericQuerier: &mockGenericQuerier{resp: []string{"b"}, warnings: nil, err: nil}},
				&secondaryQuerier{genericQuerier: &mockGenericQuerier{resp: []string{"c"}, warnings: nil, err: nil}},
			},
			expectedSelectsSeries: []labels.Labels{
				labels.FromStrings("test", "a"),
				labels.FromStrings("test", "b"),
				labels.FromStrings("test", "c"),
			},
			expectedLabels: []string{"a", "b", "c"},
		},
		{
			name: "one successful primary querier with empty response and successful secondaries",
			queriers: []genericQuerier{
				&mockGenericQuerier{resp: []string{}, warnings: nil, err: nil},
				&secondaryQuerier{genericQuerier: &mockGenericQuerier{resp: []string{"b"}, warnings: nil, err: nil}},
				&secondaryQuerier{genericQuerier: &mockGenericQuerier{resp: []string{"c"}, warnings: nil, err: nil}},
			},
			expectedSelectsSeries: []labels.Labels{
				labels.FromStrings("test", "b"),
				labels.FromStrings("test", "c"),
			},
			expectedLabels: []string{"b", "c"},
		},
		{
			name: "one failed primary querier with successful secondaries",
			queriers: []genericQuerier{
				&mockGenericQuerier{warnings: nil, err: errStorage},
				&secondaryQuerier{genericQuerier: &mockGenericQuerier{resp: []string{"b"}, warnings: nil, err: nil}},
				&secondaryQuerier{genericQuerier: &mockGenericQuerier{resp: []string{"c"}, warnings: nil, err: nil}},
			},
			expectedErrs: [4]error{errStorage, errStorage, errStorage, errStorage},
		},
		{
			name: "one successful primary querier with failed secondaries",
			queriers: []genericQuerier{
				&mockGenericQuerier{resp: []string{"a"}, warnings: nil, err: nil},
				&secondaryQuerier{genericQuerier: &mockGenericQuerier{resp: []string{"b"}, warnings: nil, err: errStorage}},
				&secondaryQuerier{genericQuerier: &mockGenericQuerier{resp: []string{"c"}, warnings: nil, err: errStorage}},
			},
			expectedSelectsSeries: []labels.Labels{
				labels.FromStrings("test", "a"),
			},
			expectedLabels:   []string{"a"},
			expectedWarnings: annotations.New().Add(errStorage),
		},
		{
			name: "successful queriers with warnings",
			queriers: []genericQuerier{
				&mockGenericQuerier{resp: []string{"a"}, warnings: annotations.New().Add(warnStorage), err: nil},
				&secondaryQuerier{genericQuerier: &mockGenericQuerier{resp: []string{"b"}, warnings: annotations.New().Add(warnStorage), err: nil}},
			},
			expectedSelectsSeries: []labels.Labels{
				labels.FromStrings("test", "a"),
				labels.FromStrings("test", "b"),
			},
			expectedLabels:   []string{"a", "b"},
			expectedWarnings: annotations.New().Add(warnStorage),
		},
	} {
		t.Run(tcase.name, func(t *testing.T) {
			q := &mergeGenericQuerier{
				queriers: tcase.queriers,
				mergeFn:  func(l ...Labels) Labels { return l[0] },
			}

			t.Run("Select", func(t *testing.T) {
				res := q.Select(context.Background(), false, nil)
				var lbls []labels.Labels
				for res.Next() {
					lbls = append(lbls, res.At().Labels())
				}
				require.Subset(t, tcase.expectedWarnings, res.Warnings())
				require.Equal(t, tcase.expectedErrs[0], res.Err())
				require.ErrorIs(t, res.Err(), tcase.expectedErrs[0], "expected error doesn't match")
				require.Equal(t, tcase.expectedSelectsSeries, lbls)

				for _, qr := range q.queriers {
					m := unwrapMockGenericQuerier(t, qr)

					exp := []bool{true}
					if len(q.queriers) == 1 {
						exp[0] = false
					}
					require.Equal(t, exp, m.sortedSeriesRequested)
				}
			})
			t.Run("LabelNames", func(t *testing.T) {
				res, w, err := q.LabelNames(ctx)
				require.Subset(t, tcase.expectedWarnings, w)
				require.ErrorIs(t, err, tcase.expectedErrs[1], "expected error doesn't match")
				require.Equal(t, tcase.expectedLabels, res)

				if err != nil {
					return
				}
				for _, qr := range q.queriers {
					m := unwrapMockGenericQuerier(t, qr)

					require.Equal(t, 1, m.labelNamesCalls)
				}
			})
			t.Run("LabelValues", func(t *testing.T) {
				res, w, err := q.LabelValues(ctx, "test")
				require.Subset(t, tcase.expectedWarnings, w)
				require.ErrorIs(t, err, tcase.expectedErrs[2], "expected error doesn't match")
				require.Equal(t, tcase.expectedLabels, res)

				if err != nil {
					return
				}
				for _, qr := range q.queriers {
					m := unwrapMockGenericQuerier(t, qr)

					require.Equal(t, []labelNameRequest{{name: "test"}}, m.labelNamesRequested)
				}
			})
			t.Run("LabelValuesWithMatchers", func(t *testing.T) {
				matcher := labels.MustNewMatcher(labels.MatchEqual, "otherLabel", "someValue")
				res, w, err := q.LabelValues(ctx, "test2", matcher)
				require.Subset(t, tcase.expectedWarnings, w)
				require.ErrorIs(t, err, tcase.expectedErrs[3], "expected error doesn't match")
				require.Equal(t, tcase.expectedLabels, res)

				if err != nil {
					return
				}
				for _, qr := range q.queriers {
					m := unwrapMockGenericQuerier(t, qr)

					require.Equal(t, []labelNameRequest{
						{name: "test"},
						{name: "test2", matchers: []*labels.Matcher{matcher}},
					}, m.labelNamesRequested)
				}
			})
		})
	}
}

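// errIterator is a chunkenc.Iterator that yields no samples and reports err from Err().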
type errIterator struct {
	err error
}

func (e errIterator) Next() chunkenc.ValueType {
	return chunkenc.ValNone
}

func (e errIterator) Seek(t int64) chunkenc.ValueType {
	return chunkenc.ValNone
}

func (e errIterator) At() (int64, float64) {
	return 0, 0
}

func (e errIterator) AtHistogram(*histogram.Histogram) (int64, *histogram.Histogram) {
	return 0, nil
}

func (e errIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64, *histogram.FloatHistogram) {
	return 0, nil
}

func (e errIterator) AtT() int64 {
	return 0
}

func (e errIterator) Err() error {
	return e.err
}
