prometheus
481 lines · 14.5 KB
1// Copyright 2021 The Prometheus Authors
2// Licensed under the Apache License, Version 2.0 (the "License");
3// you may not use this file except in compliance with the License.
4// You may obtain a copy of the License at
5//
6// http://www.apache.org/licenses/LICENSE-2.0
7//
8// Unless required by applicable law or agreed to in writing, software
9// distributed under the License is distributed on an "AS IS" BASIS,
10// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11// See the License for the specific language governing permissions and
12// limitations under the License.
13
14package storage
15
16import (
17"fmt"
18"math"
19"testing"
20
21"github.com/stretchr/testify/require"
22
23"github.com/prometheus/prometheus/model/histogram"
24"github.com/prometheus/prometheus/model/labels"
25"github.com/prometheus/prometheus/model/value"
26"github.com/prometheus/prometheus/tsdb/chunkenc"
27"github.com/prometheus/prometheus/tsdb/chunks"
28)
29
30func TestListSeriesIterator(t *testing.T) {
31it := NewListSeriesIterator(samples{
32fSample{0, 0},
33fSample{1, 1},
34fSample{1, 1.5},
35fSample{2, 2},
36fSample{3, 3},
37})
38
39// Seek to the first sample with ts=1.
40require.Equal(t, chunkenc.ValFloat, it.Seek(1))
41ts, v := it.At()
42require.Equal(t, int64(1), ts)
43require.Equal(t, 1., v)
44
45// Seek one further, next sample still has ts=1.
46require.Equal(t, chunkenc.ValFloat, it.Next())
47ts, v = it.At()
48require.Equal(t, int64(1), ts)
49require.Equal(t, 1.5, v)
50
51// Seek again to 1 and make sure we stay where we are.
52require.Equal(t, chunkenc.ValFloat, it.Seek(1))
53ts, v = it.At()
54require.Equal(t, int64(1), ts)
55require.Equal(t, 1.5, v)
56
57// Another seek.
58require.Equal(t, chunkenc.ValFloat, it.Seek(3))
59ts, v = it.At()
60require.Equal(t, int64(3), ts)
61require.Equal(t, 3., v)
62
63// And we don't go back.
64require.Equal(t, chunkenc.ValFloat, it.Seek(2))
65ts, v = it.At()
66require.Equal(t, int64(3), ts)
67require.Equal(t, 3., v)
68
69// Seek beyond the end.
70require.Equal(t, chunkenc.ValNone, it.Seek(5))
71// And we don't go back. (This exposes issue #10027.)
72require.Equal(t, chunkenc.ValNone, it.Seek(2))
73}
74
75// TestSeriesSetToChunkSet test the property of SeriesSet that says
76// returned series should be iterable even after Next is called.
77func TestChunkSeriesSetToSeriesSet(t *testing.T) {
78series := []struct {
79lbs labels.Labels
80samples []chunks.Sample
81}{
82{
83lbs: labels.FromStrings("__name__", "up", "instance", "localhost:8080"),
84samples: []chunks.Sample{
85fSample{t: 1, f: 1},
86fSample{t: 2, f: 2},
87fSample{t: 3, f: 3},
88fSample{t: 4, f: 4},
89},
90}, {
91lbs: labels.FromStrings("__name__", "up", "instance", "localhost:8081"),
92samples: []chunks.Sample{
93fSample{t: 1, f: 2},
94fSample{t: 2, f: 3},
95fSample{t: 3, f: 4},
96fSample{t: 4, f: 5},
97fSample{t: 5, f: 6},
98fSample{t: 6, f: 7},
99},
100},
101}
102var chunkSeries []ChunkSeries
103for _, s := range series {
104chunkSeries = append(chunkSeries, NewListChunkSeriesFromSamples(s.lbs, s.samples))
105}
106css := NewMockChunkSeriesSet(chunkSeries...)
107
108ss := NewSeriesSetFromChunkSeriesSet(css)
109var ssSlice []Series
110for ss.Next() {
111ssSlice = append(ssSlice, ss.At())
112}
113require.Len(t, ssSlice, 2)
114var iter chunkenc.Iterator
115for i, s := range ssSlice {
116require.EqualValues(t, series[i].lbs, s.Labels())
117iter = s.Iterator(iter)
118j := 0
119for iter.Next() == chunkenc.ValFloat {
120ts, v := iter.At()
121require.EqualValues(t, fSample{t: ts, f: v}, series[i].samples[j])
122j++
123}
124}
125}
126
// histogramTest describes one TestHistogramSeriesToChunks case: the input
// samples to encode and, for each chunk the encoder is expected to produce,
// the counter-reset header that chunk must carry.
type histogramTest struct {
	samples                     []chunks.Sample
	expectedCounterResetHeaders []chunkenc.CounterResetHeader
}
131
// TestHistogramSeriesToChunks builds fixture (float) histograms in various
// relationships — appendable, implicit counter reset, gauge, stale — and
// checks, per case, how many chunks the encoder produces and which
// counter-reset header each chunk gets. The actual assertions live in
// testHistogramsSeriesToChunks.
func TestHistogramSeriesToChunks(t *testing.T) {
	h1 := &histogram.Histogram{
		Count:         7,
		ZeroCount:     2,
		ZeroThreshold: 0.001,
		Sum:           100,
		Schema:        0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
		},
		PositiveBuckets: []int64{2, 1}, // Abs: 2, 3
	}
	// Appendable to h1.
	h2 := &histogram.Histogram{
		Count:         12,
		ZeroCount:     2,
		ZeroThreshold: 0.001,
		Sum:           100,
		Schema:        0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
			{Offset: 1, Length: 2},
		},
		PositiveBuckets: []int64{2, 1, -2, 3}, // Abs: 2, 3, 1, 4
	}
	// Implicit counter reset by reduction in buckets, not appendable.
	h2down := &histogram.Histogram{
		Count:         10,
		ZeroCount:     2,
		ZeroThreshold: 0.001,
		Sum:           100,
		Schema:        0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
			{Offset: 1, Length: 2},
		},
		PositiveBuckets: []int64{1, 1, -1, 3}, // Abs: 1, 2, 1, 4
	}

	fh1 := &histogram.FloatHistogram{
		Count:         6,
		ZeroCount:     2,
		ZeroThreshold: 0.001,
		Sum:           100,
		Schema:        0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
		},
		PositiveBuckets: []float64{3, 1},
	}
	// Appendable to fh1.
	fh2 := &histogram.FloatHistogram{
		Count:         17,
		ZeroCount:     2,
		ZeroThreshold: 0.001,
		Sum:           100,
		Schema:        0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
			{Offset: 1, Length: 2},
		},
		PositiveBuckets: []float64{4, 2, 7, 2},
	}
	// Implicit counter reset by reduction in buckets, not appendable.
	fh2down := &histogram.FloatHistogram{
		Count:         15,
		ZeroCount:     2,
		ZeroThreshold: 0.001,
		Sum:           100,
		Schema:        0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
			{Offset: 1, Length: 2},
		},
		PositiveBuckets: []float64{2, 2, 7, 2},
	}

	// Gauge histogram.
	gh1 := &histogram.Histogram{
		CounterResetHint: histogram.GaugeType,
		Count:            7,
		ZeroCount:        2,
		ZeroThreshold:    0.001,
		Sum:              100,
		Schema:           0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
		},
		PositiveBuckets: []int64{2, 1}, // Abs: 2, 3
	}
	gh2 := &histogram.Histogram{
		CounterResetHint: histogram.GaugeType,
		Count:            12,
		ZeroCount:        2,
		ZeroThreshold:    0.001,
		Sum:              100,
		Schema:           0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
			{Offset: 1, Length: 2},
		},
		PositiveBuckets: []int64{2, 1, -2, 3}, // Abs: 2, 3, 1, 4
	}

	// Float gauge histogram.
	gfh1 := &histogram.FloatHistogram{
		CounterResetHint: histogram.GaugeType,
		Count:            6,
		ZeroCount:        2,
		ZeroThreshold:    0.001,
		Sum:              100,
		Schema:           0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
		},
		PositiveBuckets: []float64{3, 1},
	}
	gfh2 := &histogram.FloatHistogram{
		CounterResetHint: histogram.GaugeType,
		Count:            17,
		ZeroCount:        2,
		ZeroThreshold:    0.001,
		Sum:              100,
		Schema:           0,
		PositiveSpans: []histogram.Span{
			{Offset: 0, Length: 2},
			{Offset: 1, Length: 2},
		},
		PositiveBuckets: []float64{4, 2, 7, 2},
	}

	// Stale markers: a StaleNaN Sum marks the sample as stale.
	staleHistogram := &histogram.Histogram{
		Sum: math.Float64frombits(value.StaleNaN),
	}
	staleFloatHistogram := &histogram.FloatHistogram{
		Sum: math.Float64frombits(value.StaleNaN),
	}

	tests := map[string]histogramTest{
		"single histogram to single chunk": {
			samples: []chunks.Sample{
				hSample{t: 1, h: h1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset},
		},
		"two histograms encoded to a single chunk": {
			samples: []chunks.Sample{
				hSample{t: 1, h: h1},
				hSample{t: 2, h: h2},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset},
		},
		"two histograms encoded to two chunks": {
			samples: []chunks.Sample{
				hSample{t: 1, h: h2},
				hSample{t: 2, h: h1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.CounterReset},
		},
		"histogram and stale sample encoded to two chunks": {
			samples: []chunks.Sample{
				hSample{t: 1, h: staleHistogram},
				hSample{t: 2, h: h1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.UnknownCounterReset},
		},
		"histogram and reduction in bucket encoded to two chunks": {
			samples: []chunks.Sample{
				hSample{t: 1, h: h1},
				hSample{t: 2, h: h2down},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.CounterReset},
		},
		// Float histograms.
		"single float histogram to single chunk": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: fh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset},
		},
		"two float histograms encoded to a single chunk": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: fh1},
				fhSample{t: 2, fh: fh2},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset},
		},
		"two float histograms encoded to two chunks": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: fh2},
				fhSample{t: 2, fh: fh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.CounterReset},
		},
		"float histogram and stale sample encoded to two chunks": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: staleFloatHistogram},
				fhSample{t: 2, fh: fh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.UnknownCounterReset},
		},
		"float histogram and reduction in bucket encoded to two chunks": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: fh1},
				fhSample{t: 2, fh: fh2down},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.CounterReset},
		},
		// Mixed. A change of sample encoding always starts a new chunk.
		"histogram and float histogram encoded to two chunks": {
			samples: []chunks.Sample{
				hSample{t: 1, h: h1},
				fhSample{t: 2, fh: fh2},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.UnknownCounterReset},
		},
		"float histogram and histogram encoded to two chunks": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: fh1},
				hSample{t: 2, h: h2},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.UnknownCounterReset},
		},
		"histogram and stale float histogram encoded to two chunks": {
			samples: []chunks.Sample{
				hSample{t: 1, h: h1},
				fhSample{t: 2, fh: staleFloatHistogram},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.UnknownCounterReset, chunkenc.UnknownCounterReset},
		},
		// Gauge histograms never split on counter movement.
		"single gauge histogram encoded to one chunk": {
			samples: []chunks.Sample{
				hSample{t: 1, h: gh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.GaugeType},
		},
		"two gauge histograms encoded to one chunk when counter increases": {
			samples: []chunks.Sample{
				hSample{t: 1, h: gh1},
				hSample{t: 2, h: gh2},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.GaugeType},
		},
		"two gauge histograms encoded to one chunk when counter decreases": {
			samples: []chunks.Sample{
				hSample{t: 1, h: gh2},
				hSample{t: 2, h: gh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.GaugeType},
		},
		"single gauge float histogram encoded to one chunk": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: gfh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.GaugeType},
		},
		"two float gauge histograms encoded to one chunk when counter increases": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: gfh1},
				fhSample{t: 2, fh: gfh2},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.GaugeType},
		},
		"two float gauge histograms encoded to one chunk when counter decreases": {
			samples: []chunks.Sample{
				fhSample{t: 1, fh: gfh2},
				fhSample{t: 2, fh: gfh1},
			},
			expectedCounterResetHeaders: []chunkenc.CounterResetHeader{chunkenc.GaugeType},
		},
	}

	for testName, test := range tests {
		t.Run(testName, func(t *testing.T) {
			testHistogramsSeriesToChunks(t, test)
		})
	}
}
410
411func testHistogramsSeriesToChunks(t *testing.T, test histogramTest) {
412lbs := labels.FromStrings("__name__", "up", "instance", "localhost:8080")
413copiedSamples := []chunks.Sample{}
414for _, s := range test.samples {
415switch cs := s.(type) {
416case hSample:
417copiedSamples = append(copiedSamples, hSample{t: cs.t, h: cs.h.Copy()})
418case fhSample:
419copiedSamples = append(copiedSamples, fhSample{t: cs.t, fh: cs.fh.Copy()})
420default:
421t.Error("internal error, unexpected type")
422}
423}
424series := NewListSeries(lbs, copiedSamples)
425encoder := NewSeriesToChunkEncoder(series)
426require.EqualValues(t, lbs, encoder.Labels())
427
428chks, err := ExpandChunks(encoder.Iterator(nil))
429require.NoError(t, err)
430require.Equal(t, len(test.expectedCounterResetHeaders), len(chks))
431
432// Decode all encoded samples and assert they are equal to the original ones.
433encodedSamples := chunks.ChunkMetasToSamples(chks)
434require.Equal(t, len(test.samples), len(encodedSamples))
435
436for i, s := range test.samples {
437encodedSample := encodedSamples[i]
438switch expectedSample := s.(type) {
439case hSample:
440require.Equal(t, chunkenc.ValHistogram, encodedSample.Type(), "expect histogram", fmt.Sprintf("at idx %d", i))
441h := encodedSample.H()
442// Ignore counter reset if not gauge here, will check on chunk level.
443if expectedSample.h.CounterResetHint != histogram.GaugeType {
444h.CounterResetHint = histogram.UnknownCounterReset
445}
446if value.IsStaleNaN(expectedSample.h.Sum) {
447require.True(t, value.IsStaleNaN(h.Sum), fmt.Sprintf("at idx %d", i))
448continue
449}
450require.Equal(t, *expectedSample.h, *h.Compact(0), fmt.Sprintf("at idx %d", i))
451case fhSample:
452require.Equal(t, chunkenc.ValFloatHistogram, encodedSample.Type(), "expect float histogram", fmt.Sprintf("at idx %d", i))
453fh := encodedSample.FH()
454// Ignore counter reset if not gauge here, will check on chunk level.
455if expectedSample.fh.CounterResetHint != histogram.GaugeType {
456fh.CounterResetHint = histogram.UnknownCounterReset
457}
458if value.IsStaleNaN(expectedSample.fh.Sum) {
459require.True(t, value.IsStaleNaN(fh.Sum), fmt.Sprintf("at idx %d", i))
460continue
461}
462require.Equal(t, *expectedSample.fh, *fh.Compact(0), fmt.Sprintf("at idx %d", i))
463default:
464t.Error("internal error, unexpected type")
465}
466}
467
468for i, expectedCounterResetHint := range test.expectedCounterResetHeaders {
469require.Equal(t, expectedCounterResetHint, getCounterResetHint(chks[i]), fmt.Sprintf("chunk at index %d", i))
470}
471}
472
473func getCounterResetHint(chunk chunks.Meta) chunkenc.CounterResetHeader {
474switch chk := chunk.Chunk.(type) {
475case *chunkenc.HistogramChunk:
476return chk.GetCounterResetHeader()
477case *chunkenc.FloatHistogramChunk:
478return chk.GetCounterResetHeader()
479}
480return chunkenc.UnknownCounterReset
481}
482