prometheus / series_test.go
// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package storage

import (
	"fmt"
	"math"
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/prometheus/prometheus/model/histogram"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/value"
	"github.com/prometheus/prometheus/tsdb/chunkenc"
	"github.com/prometheus/prometheus/tsdb/chunks"
)

func TestListSeriesIterator(t *testing.T) {
	it := NewListSeriesIterator(samples{
		fSample{0, 0},
		fSample{1, 1},
		fSample{1, 1.5},
		fSample{2, 2},
		fSample{3, 3},
	})

	// Seek to the first sample with ts=1.
	require.Equal(t, chunkenc.ValFloat, it.Seek(1))
	ts, v := it.At()
	require.Equal(t, int64(1), ts)
	require.Equal(t, 1., v)

	// Advance one further; the next sample still has ts=1.
	require.Equal(t, chunkenc.ValFloat, it.Next())
	ts, v = it.At()
	require.Equal(t, int64(1), ts)
	require.Equal(t, 1.5, v)

	// Seek again to 1 and make sure we stay where we are.
	require.Equal(t, chunkenc.ValFloat, it.Seek(1))
	ts, v = it.At()
	require.Equal(t, int64(1), ts)
	require.Equal(t, 1.5, v)

	// Another seek.
	require.Equal(t, chunkenc.ValFloat, it.Seek(3))
	ts, v = it.At()
	require.Equal(t, int64(3), ts)
	require.Equal(t, 3., v)

	// And we don't go back.
	require.Equal(t, chunkenc.ValFloat, it.Seek(2))
	ts, v = it.At()
	require.Equal(t, int64(3), ts)
	require.Equal(t, 3., v)

	// Seek beyond the end.
	require.Equal(t, chunkenc.ValNone, it.Seek(5))
	// And we don't go back. (This exposes issue #10027.)
	require.Equal(t, chunkenc.ValNone, it.Seek(2))
}

// TestChunkSeriesSetToSeriesSet tests the property of SeriesSet that says
// returned series should be iterable even after Next is called.
func TestChunkSeriesSetToSeriesSet(t *testing.T) {
	series := []struct {
		lbs     labels.Labels
		samples []chunks.Sample
	}{
		{
			lbs: labels.FromStrings("__name__", "up", "instance", "localhost:8080"),
			samples: []chunks.Sample{
				fSample{t: 1, f: 1},
				fSample{t: 2, f: 2},
				fSample{t: 3, f: 3},
				fSample{t: 4, f: 4},
			},
		}, {
			lbs: labels.FromStrings("__name__", "up", "instance", "localhost:8081"),
			samples: []chunks.Sample{
				fSample{t: 1, f: 2},
				fSample{t: 2, f: 3},
				fSample{t: 3, f: 4},
				fSample{t: 4, f: 5},
				fSample{t: 5, f: 6},
				fSample{t: 6, f: 7},
			},
		},
	}
	var chunkSeries []ChunkSeries
	for _, s := range series {
		chunkSeries = append(chunkSeries, NewListChunkSeriesFromSamples(s.lbs, s.samples))
	}
	css := NewMockChunkSeriesSet(chunkSeries...)

	ss := NewSeriesSetFromChunkSeriesSet(css)
	var ssSlice []Series
	for ss.Next() {
		ssSlice = append(ssSlice, ss.At())
	}
	require.Len(t, ssSlice, 2)
	var iter chunkenc.Iterator
	for i, s := range ssSlice {
		require.EqualValues(t, series[i].lbs, s.Labels())
		iter = s.Iterator(iter)
		j := 0
		for iter.Next() == chunkenc.ValFloat {
			ts, v := iter.At()
			require.EqualValues(t, fSample{t: ts, f: v}, series[i].samples[j])
			j++
		}
	}
}
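
// histogramTest describes one case for TestHistogramSeriesToChunks: the samples
// of a single series and the counter reset headers expected on the chunks they
// are encoded into.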
type histogramTest struct {
	samples                     []chunks.Sample
	expectedCounterResetHeaders []chunkenc.CounterResetHeader
}

func TestHistogramSeriesToChunks(t *testing.T) {
	h1 := &histogram.Histogram{
		Count:         7,
		ZeroCount:     2,
		ZeroThreshold: 0.001,
		Sum:           100,
		Schema:        0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
		},
		PositiveBuckets: []int64{2, 1}, // Abs: 2, 3
	}
	// Appendable to h1.
	h2 := &histogram.Histogram{
		Count:         12,
		ZeroCount:     2,
		ZeroThreshold: 0.001,
		Sum:           100,
		Schema:        0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
			{Offset: 1, Length: 2},
		},
		PositiveBuckets: []int64{2, 1, -2, 3}, // Abs: 2, 3, 1, 4
	}
	// Implicit counter reset by reduction in buckets, not appendable.
	h2down := &histogram.Histogram{
		Count:         10,
		ZeroCount:     2,
		ZeroThreshold: 0.001,
		Sum:           100,
		Schema:        0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
			{Offset: 1, Length: 2},
		},
		PositiveBuckets: []int64{1, 1, -1, 3}, // Abs: 1, 2, 1, 4
	}

	fh1 := &histogram.FloatHistogram{
		Count:         6,
		ZeroCount:     2,
		ZeroThreshold: 0.001,
		Sum:           100,
		Schema:        0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
		},
		PositiveBuckets: []float64{3, 1},
	}
	// Appendable to fh1.
	fh2 := &histogram.FloatHistogram{
		Count:         17,
		ZeroCount:     2,
		ZeroThreshold: 0.001,
		Sum:           100,
		Schema:        0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
			{Offset: 1, Length: 2},
		},
		PositiveBuckets: []float64{4, 2, 7, 2},
	}
	// Implicit counter reset by reduction in buckets, not appendable.
	fh2down := &histogram.FloatHistogram{
		Count:         15,
		ZeroCount:     2,
		ZeroThreshold: 0.001,
		Sum:           100,
		Schema:        0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
			{Offset: 1, Length: 2},
		},
		PositiveBuckets: []float64{2, 2, 7, 2},
	}

	// Gauge histogram.
	gh1 := &histogram.Histogram{
		CounterResetHint: histogram.GaugeType,
		Count:            7,
		ZeroCount:        2,
		ZeroThreshold:    0.001,
		Sum:              100,
		Schema:           0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
		},
		PositiveBuckets: []int64{2, 1}, // Abs: 2, 3
	}
	gh2 := &histogram.Histogram{
		CounterResetHint: histogram.GaugeType,
		Count:            12,
		ZeroCount:        2,
		ZeroThreshold:    0.001,
		Sum:              100,
		Schema:           0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
			{Offset: 1, Length: 2},
		},
		PositiveBuckets: []int64{2, 1, -2, 3}, // Abs: 2, 3, 1, 4
	}

	// Float gauge histogram.
	gfh1 := &histogram.FloatHistogram{
		CounterResetHint: histogram.GaugeType,
		Count:            6,
		ZeroCount:        2,
		ZeroThreshold:    0.001,
		Sum:              100,
		Schema:           0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
		},
		PositiveBuckets: []float64{3, 1},
	}
	gfh2 := &histogram.FloatHistogram{
		CounterResetHint: histogram.GaugeType,
		Count:            17,
		ZeroCount:        2,
		ZeroThreshold:    0.001,
		Sum:              100,
		Schema:           0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
			{Offset: 1, Length: 2},
		},
		PositiveBuckets: []float64{4, 2, 7, 2},
	}

	staleHistogram := &histogram.Histogram{
		Sum: math.Float64frombits(value.StaleNaN),
	}
	staleFloatHistogram := &histogram.FloatHistogram{
		Sum: math.Float64frombits(value.StaleNaN),
	}

	tests := map[string]histogramTest{
		"single histogram to single chunk": {
			samples: []chunks.Sample{
				hSample{t: 1, h: h1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset},
		},
		"two histograms encoded to a single chunk": {
			samples: []chunks.Sample{
				hSample{t: 1, h: h1},
				hSample{t: 2, h: h2},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset},
		},
		"two histograms encoded to two chunks": {
			samples: []chunks.Sample{
				hSample{t: 1, h: h2},
				hSample{t: 2, h: h1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.CounterReset},
		},
		"histogram and stale sample encoded to two chunks": {
			samples: []chunks.Sample{
				hSample{t: 1, h: staleHistogram},
				hSample{t: 2, h: h1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.UnknownCounterReset},
		},
		"histogram and reduction in bucket encoded to two chunks": {
			samples: []chunks.Sample{
				hSample{t: 1, h: h1},
				hSample{t: 2, h: h2down},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.CounterReset},
		},
		// Float histograms.
		"single float histogram to single chunk": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: fh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset},
		},
		"two float histograms encoded to a single chunk": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: fh1},
				fhSample{t: 2, fh: fh2},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset},
		},
		"two float histograms encoded to two chunks": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: fh2},
				fhSample{t: 2, fh: fh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.CounterReset},
		},
		"float histogram and stale sample encoded to two chunks": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: staleFloatHistogram},
				fhSample{t: 2, fh: fh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.UnknownCounterReset},
		},
		"float histogram and reduction in bucket encoded to two chunks": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: fh1},
				fhSample{t: 2, fh: fh2down},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.CounterReset},
		},
		// Mixed.
		"histogram and float histogram encoded to two chunks": {
			samples: []chunks.Sample{
				hSample{t: 1, h: h1},
				fhSample{t: 2, fh: fh2},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.UnknownCounterReset},
		},
		"float histogram and histogram encoded to two chunks": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: fh1},
				hSample{t: 2, h: h2},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.UnknownCounterReset},
		},
		"histogram and stale float histogram encoded to two chunks": {
			samples: []chunks.Sample{
				hSample{t: 1, h: h1},
				fhSample{t: 2, fh: staleFloatHistogram},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.UnknownCounterReset},
		},
		"single gauge histogram encoded to one chunk": {
			samples: []chunks.Sample{
				hSample{t: 1, h: gh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.GaugeType},
		},
		"two gauge histograms encoded to one chunk when counter increases": {
			samples: []chunks.Sample{
				hSample{t: 1, h: gh1},
				hSample{t: 2, h: gh2},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.GaugeType},
		},
		"two gauge histograms encoded to one chunk when counter decreases": {
			samples: []chunks.Sample{
				hSample{t: 1, h: gh2},
				hSample{t: 2, h: gh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.GaugeType},
		},
		"single gauge float histogram encoded to one chunk": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: gfh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.GaugeType},
		},
		"two float gauge histograms encoded to one chunk when counter increases": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: gfh1},
				fhSample{t: 2, fh: gfh2},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.GaugeType},
		},
		"two float gauge histograms encoded to one chunk when counter decreases": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: gfh2},
				fhSample{t: 2, fh: gfh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.GaugeType},
		},
	}

	for testName, test := range tests {
		t.Run(testName, func(t *testing.T) {
			testHistogramsSeriesToChunks(t, test)
		})
	}
}

func testHistogramsSeriesToChunks(t *testing.T, test histogramTest) {
	lbs := labels.FromStrings("__name__", "up", "instance", "localhost:8080")
	copiedSamples := []chunks.Sample{}
	for _, s := range test.samples {
		switch cs := s.(type) {
		case hSample:
			copiedSamples = append(copiedSamples, hSample{t: cs.t, h: cs.h.Copy()})
		case fhSample:
			copiedSamples = append(copiedSamples, fhSample{t: cs.t, fh: cs.fh.Copy()})
		default:
			t.Error("internal error, unexpected type")
		}
	}
	series := NewListSeries(lbs, copiedSamples)
	encoder := NewSeriesToChunkEncoder(series)
	require.EqualValues(t, lbs, encoder.Labels())

	chks, err := ExpandChunks(encoder.Iterator(nil))
	require.NoError(t, err)
	require.Equal(t, len(test.expectedCounterResetHeaders), len(chks))

	// Decode all encoded samples and assert they are equal to the original ones.
	encodedSamples := chunks.ChunkMetasToSamples(chks)
	require.Equal(t, len(test.samples), len(encodedSamples))

	for i, s := range test.samples {
		encodedSample := encodedSamples[i]
		switch expectedSample := s.(type) {
		case hSample:
			require.Equal(t, chunkenc.ValHistogram, encodedSample.Type(), "expect histogram", fmt.Sprintf("at idx %d", i))
			h := encodedSample.H()
			// Ignore the counter reset hint if this is not a gauge histogram; it is checked at the chunk level below.
			if expectedSample.h.CounterResetHint != histogram.GaugeType {
				h.CounterResetHint = histogram.UnknownCounterReset
			}
			if value.IsStaleNaN(expectedSample.h.Sum) {
				require.True(t, value.IsStaleNaN(h.Sum), fmt.Sprintf("at idx %d", i))
				continue
			}
			require.Equal(t, *expectedSample.h, *h.Compact(0), fmt.Sprintf("at idx %d", i))
		case fhSample:
			require.Equal(t, chunkenc.ValFloatHistogram, encodedSample.Type(), "expect float histogram", fmt.Sprintf("at idx %d", i))
			fh := encodedSample.FH()
			// Ignore the counter reset hint if this is not a gauge histogram; it is checked at the chunk level below.
			if expectedSample.fh.CounterResetHint != histogram.GaugeType {
				fh.CounterResetHint = histogram.UnknownCounterReset
			}
			if value.IsStaleNaN(expectedSample.fh.Sum) {
				require.True(t, value.IsStaleNaN(fh.Sum), fmt.Sprintf("at idx %d", i))
				continue
			}
			require.Equal(t, *expectedSample.fh, *fh.Compact(0), fmt.Sprintf("at idx %d", i))
		default:
			t.Error("internal error, unexpected type")
		}
	}

	for i, expectedCounterResetHint := range test.expectedCounterResetHeaders {
		require.Equal(t, expectedCounterResetHint, getCounterResetHint(chks[i]), fmt.Sprintf("chunk at index %d", i))
	}
}
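
// getCounterResetHint returns the counter reset header of a histogram or float
// histogram chunk, and UnknownCounterReset for any other chunk type.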
func getCounterResetHint(chunk chunks.Meta) chunkenc.CounterResetHeader {
	switch chk := chunk.Chunk.(type) {
	case *chunkenc.HistogramChunk:
		return chk.GetCounterResetHeader()
	case *chunkenc.FloatHistogramChunk:
		return chk.GetCounterResetHeader()
	}
	return chunkenc.UnknownCounterReset
}