Skip to content

Commit 6bc676c

Browse files
committed
Using max chunk ts
Signed-off-by: Alan Protasio <alanprot@gmail.com>
1 parent 53debe8 commit 6bc676c

File tree

3 files changed

+8
-19
lines changed

3 files changed

+8
-19
lines changed

pkg/querier/batch/batch.go

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -100,6 +100,8 @@ func (a *iteratorAdapter) Seek(t int64) bool {
100100
}
101101
return true
102102
} else if t <= a.underlying.MaxTime() {
103+
// In this case, some timestamp inside the current underlying chunk can fulfill the seek.
104+
// We will call Next until we reach the sample, as that is faster than calling Seek directly.
103105
for a.underlying.Next(promchunk.BatchSize) {
104106
a.curr = a.underlying.Batch()
105107
if t <= a.curr.Timestamps[a.curr.Length-1] {

pkg/querier/batch/batch_test.go

Lines changed: 2 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -64,30 +64,13 @@ func BenchmarkNewChunkMergeIterator_Seek(b *testing.B) {
6464
scrapeInterval time.Duration
6565
enc promchunk.Encoding
6666
}{
67-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second / 2, enc: promchunk.PrometheusXorChunk},
68-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second, enc: promchunk.PrometheusXorChunk},
69-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 2, enc: promchunk.PrometheusXorChunk},
70-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 10, enc: promchunk.PrometheusXorChunk},
71-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 100, enc: promchunk.PrometheusXorChunk},
72-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 1000, enc: promchunk.PrometheusXorChunk},
7367
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second / 2, enc: promchunk.PrometheusXorChunk},
7468
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second, enc: promchunk.PrometheusXorChunk},
7569
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 2, enc: promchunk.PrometheusXorChunk},
7670
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 10, enc: promchunk.PrometheusXorChunk},
71+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 50, enc: promchunk.PrometheusXorChunk},
7772
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 100, enc: promchunk.PrometheusXorChunk},
78-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 1000, enc: promchunk.PrometheusXorChunk},
79-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second / 2, enc: promchunk.PrometheusXorChunk},
80-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second, enc: promchunk.PrometheusXorChunk},
81-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 2, enc: promchunk.PrometheusXorChunk},
82-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 10, enc: promchunk.PrometheusXorChunk},
83-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 100, enc: promchunk.PrometheusXorChunk},
84-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 1000, enc: promchunk.PrometheusXorChunk},
85-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 60 * time.Second, seekStep: 60 * time.Second / 2, enc: promchunk.PrometheusXorChunk},
86-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 60 * time.Second, seekStep: 60 * time.Second, enc: promchunk.PrometheusXorChunk},
87-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 60 * time.Second, seekStep: 60 * time.Second * 2, enc: promchunk.PrometheusXorChunk},
88-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 60 * time.Second, seekStep: 60 * time.Second * 10, enc: promchunk.PrometheusXorChunk},
89-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 60 * time.Second, seekStep: 60 * time.Second * 100, enc: promchunk.PrometheusXorChunk},
90-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 60 * time.Second, seekStep: 60 * time.Second * 1000, enc: promchunk.PrometheusXorChunk},
73+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 200, enc: promchunk.PrometheusXorChunk},
9174
}
9275

9376
for _, scenario := range scenarios {

pkg/querier/batch/merge.go

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,10 @@ type mergeIterator struct {
2222
}
2323

2424
func (c *mergeIterator) MaxTime() int64 {
25+
if len(c.h) < 1 {
26+
return -1
27+
}
28+
2529
return c.h[0].MaxTime()
2630
}
2731

0 commit comments

Comments (0)