tweaking the names a little per some discussions to make things easier to understand (#1582)

Signed-off-by: Edward Welch <edward.welch@grafana.com>
slim-bean authored Jan 24, 2020
1 parent f1b8d4d commit 64798e2
Showing 5 changed files with 59 additions and 59 deletions.
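
As the diff below shows, the commit renames the ChunkData statistics fields consistently across all five files:

BytesUncompressed -> HeadChunkBytes
LinesUncompressed -> HeadChunkLines
BytesDecompressed -> DecompressedBytes
LinesDecompressed -> DecompressedLines
BytesCompressed   -> CompressedBytes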
10 changes: 5 additions & 5 deletions pkg/chunkenc/memchunk.go
@@ -511,10 +511,10 @@ func (hb *headBlock) iterator(ctx context.Context, mint, maxt int64, filter logq
// the alternate would be that we allocate a new b.entries everytime we cut a block,
// but the tradeoff is that queries to near-realtime data would be much lower than
// cutting of blocks.
- chunkStats.LinesUncompressed += int64(len(hb.entries))
+ chunkStats.HeadChunkLines += int64(len(hb.entries))
entries := make([]entry, 0, len(hb.entries))
for _, e := range hb.entries {
- chunkStats.BytesUncompressed += int64(len(e.s))
+ chunkStats.HeadChunkBytes += int64(len(e.s))
if filter == nil || filter([]byte(e.s)) {
entries = append(entries, e)
}
@@ -582,7 +582,7 @@ type bufferedIterator struct

func newBufferedIterator(ctx context.Context, pool ReaderPool, b []byte, filter logql.Filter) *bufferedIterator {
chunkStats := stats.GetChunkData(ctx)
- chunkStats.BytesCompressed += int64(len(b))
+ chunkStats.CompressedBytes += int64(len(b))
return &bufferedIterator{
stats: chunkStats,
origBytes: b,
@@ -608,8 +608,8 @@ func (si *bufferedIterator) Next() bool {
return false
}
// we decode always the line length and ts as varint
- si.stats.BytesDecompressed += int64(len(line)) + 2*binary.MaxVarintLen64
- si.stats.LinesDecompressed++
+ si.stats.DecompressedBytes += int64(len(line)) + 2*binary.MaxVarintLen64
+ si.stats.DecompressedLines++
if si.filter != nil && !si.filter(line) {
continue
}
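
The two memchunk.go hunks above separate in-memory head-block accounting from block decompression accounting. Here is a minimal runnable sketch of that counting pattern, assuming a trimmed stand-in for the stats type; the field names and the 2*binary.MaxVarintLen64 per-line overhead come from this commit, while the entries and the 128-byte block are made up:

package main

import (
	"encoding/binary"
	"fmt"
)

// chunkData is a trimmed stand-in for the renamed stats fields in this commit.
type chunkData struct {
	HeadChunkBytes    int64
	HeadChunkLines    int64
	DecompressedBytes int64
	DecompressedLines int64
	CompressedBytes   int64
}

func main() {
	var stats chunkData

	// Head block: entries are still uncompressed in memory, so they count
	// toward the head-chunk totals.
	headEntries := []string{"line one", "line two"}
	stats.HeadChunkLines += int64(len(headEntries))
	for _, e := range headEntries {
		stats.HeadChunkBytes += int64(len(e))
	}

	// Cut block: the compressed payload is counted once when the iterator is
	// built; each decoded line then adds its length plus the varint-encoded
	// timestamp and length overhead, as in bufferedIterator.Next above.
	compressed := make([]byte, 128) // hypothetical compressed block
	stats.CompressedBytes += int64(len(compressed))
	for _, line := range []string{"line three", "line four"} {
		stats.DecompressedBytes += int64(len(line)) + 2*binary.MaxVarintLen64
		stats.DecompressedLines++
	}

	fmt.Printf("%+v\n", stats)
}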
38 changes: 19 additions & 19 deletions pkg/logql/stats/context.go
@@ -53,22 +53,22 @@ func Log(log log.Logger, r Result) {
"Ingester.TotalBatches", r.Ingester.TotalBatches,
"Ingester.TotalLinesSent", r.Ingester.TotalLinesSent,

"Ingester.BytesUncompressed", humanize.Bytes(uint64(r.Ingester.BytesUncompressed)),
"Ingester.LinesUncompressed", r.Ingester.LinesUncompressed,
"Ingester.BytesDecompressed", humanize.Bytes(uint64(r.Ingester.BytesDecompressed)),
"Ingester.LinesDecompressed", r.Ingester.LinesDecompressed,
"Ingester.BytesCompressed", humanize.Bytes(uint64(r.Ingester.BytesCompressed)),
"Ingester.HeadChunkBytes", humanize.Bytes(uint64(r.Ingester.HeadChunkBytes)),
"Ingester.HeadChunkLines", r.Ingester.HeadChunkLines,
"Ingester.DecompressedBytes", humanize.Bytes(uint64(r.Ingester.DecompressedBytes)),
"Ingester.DecompressedLines", r.Ingester.DecompressedLines,
"Ingester.CompressedBytes", humanize.Bytes(uint64(r.Ingester.CompressedBytes)),
"Ingester.TotalDuplicates", r.Ingester.TotalDuplicates,

"Store.TotalChunksRef", r.Store.TotalChunksRef,
"Store.TotalDownloadedChunks", r.Store.TotalDownloadedChunks,
"Store.TimeDownloadingChunks", r.Store.TimeDownloadingChunks,

"Store.BytesUncompressed", humanize.Bytes(uint64(r.Store.BytesUncompressed)),
"Store.LinesUncompressed", r.Store.LinesUncompressed,
"Store.BytesDecompressed", humanize.Bytes(uint64(r.Store.BytesDecompressed)),
"Store.LinesDecompressed", r.Store.LinesDecompressed,
"Store.BytesCompressed", humanize.Bytes(uint64(r.Store.BytesCompressed)),
"Store.HeadChunkBytes", humanize.Bytes(uint64(r.Store.HeadChunkBytes)),
"Store.HeadChunkLines", r.Store.HeadChunkLines,
"Store.DecompressedBytes", humanize.Bytes(uint64(r.Store.DecompressedBytes)),
"Store.DecompressedLines", r.Store.DecompressedLines,
"Store.CompressedBytes", humanize.Bytes(uint64(r.Store.CompressedBytes)),
"Store.TotalDuplicates", r.Store.TotalDuplicates,

"Summary.BytesProcessedPerSeconds", humanize.Bytes(uint64(r.Summary.BytesProcessedPerSeconds)),
@@ -112,11 +112,11 @@ func NewContext(ctx context.Context) context.Context {

// ChunkData contains chunks specific statistics.
type ChunkData struct {
- BytesUncompressed int64 // Total bytes processed but was already in memory. (found in the headchunk)
- LinesUncompressed int64 // Total lines processed but was already in memory. (found in the headchunk)
- BytesDecompressed int64 // Total bytes decompressed and processed from chunks.
- LinesDecompressed int64 // Total lines decompressed and processed from chunks.
- BytesCompressed int64 // Total bytes of compressed chunks (blocks) processed.
+ HeadChunkBytes int64 // Total bytes processed but was already in memory. (found in the headchunk)
+ HeadChunkLines int64 // Total lines processed but was already in memory. (found in the headchunk)
+ DecompressedBytes int64 // Total bytes decompressed and processed from chunks.
+ DecompressedLines int64 // Total lines decompressed and processed from chunks.
+ CompressedBytes int64 // Total bytes of compressed chunks (blocks) processed.
TotalDuplicates int64 // Total duplicates found while processing.
}

@@ -178,13 +178,13 @@ func Snapshot(ctx context.Context, execTime time.Duration) Result {
}

// calculate the summary
- res.Summary.TotalBytesProcessed = res.Store.BytesDecompressed + res.Store.BytesUncompressed +
- res.Ingester.BytesDecompressed + res.Ingester.BytesUncompressed
+ res.Summary.TotalBytesProcessed = res.Store.DecompressedBytes + res.Store.HeadChunkBytes +
+ res.Ingester.DecompressedBytes + res.Ingester.HeadChunkBytes
res.Summary.BytesProcessedPerSeconds =
int64(float64(res.Summary.TotalBytesProcessed) /
execTime.Seconds())
- res.Summary.TotalLinesProcessed = res.Store.LinesDecompressed + res.Store.LinesUncompressed +
- res.Ingester.LinesDecompressed + res.Ingester.LinesUncompressed
+ res.Summary.TotalLinesProcessed = res.Store.DecompressedLines + res.Store.HeadChunkLines +
+ res.Ingester.DecompressedLines + res.Ingester.HeadChunkLines
res.Summary.LinesProcessedPerSeconds =
int64(float64(res.Summary.TotalLinesProcessed) /
execTime.Seconds())
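
The Snapshot hunk above sums each component's head-chunk and decompressed counters into the query totals: every byte is processed exactly once, either straight from a head chunk in memory or after decompressing a block. A small worked sketch of that arithmetic, using made-up counts (only the formulas mirror the commit):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Hypothetical per-component counters.
	var (
		storeDecompressed    int64 = 40
		storeHeadChunk       int64 = 10
		ingesterDecompressed int64 = 24
		ingesterHeadChunk    int64 = 10
		execTime                   = 2 * time.Second
	)

	// Mirrors res.Summary.TotalBytesProcessed in Snapshot above.
	totalBytes := storeDecompressed + storeHeadChunk +
		ingesterDecompressed + ingesterHeadChunk

	// Mirrors res.Summary.BytesProcessedPerSeconds.
	perSecond := int64(float64(totalBytes) / execTime.Seconds())

	fmt.Println(totalBytes, perSecond) // 84 42
}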
40 changes: 20 additions & 20 deletions pkg/logql/stats/context_test.go
@@ -12,11 +12,11 @@ import (
func TestSnapshot(t *testing.T) {
ctx := NewContext(context.Background())

- GetChunkData(ctx).BytesUncompressed += 10
- GetChunkData(ctx).LinesUncompressed += 20
- GetChunkData(ctx).BytesDecompressed += 40
- GetChunkData(ctx).LinesDecompressed += 20
- GetChunkData(ctx).BytesCompressed += 30
+ GetChunkData(ctx).HeadChunkBytes += 10
+ GetChunkData(ctx).HeadChunkLines += 20
+ GetChunkData(ctx).DecompressedBytes += 40
+ GetChunkData(ctx).DecompressedLines += 20
+ GetChunkData(ctx).CompressedBytes += 30
GetChunkData(ctx).TotalDuplicates += 10

GetStoreData(ctx).TotalChunksRef += 50
@@ -35,11 +35,11 @@ func TestSnapshot(t *testing.T) {
TotalLinesSent: 60,
},
ChunkData: ChunkData{
- BytesUncompressed: 10,
- LinesUncompressed: 20,
- BytesDecompressed: 24,
- LinesDecompressed: 40,
- BytesCompressed: 60,
+ HeadChunkBytes: 10,
+ HeadChunkLines: 20,
+ DecompressedBytes: 24,
+ DecompressedLines: 40,
+ CompressedBytes: 60,
TotalDuplicates: 2,
},
TotalReached: 2,
@@ -51,11 +51,11 @@
TimeDownloadingChunks: time.Second,
},
ChunkData: ChunkData{
- BytesUncompressed: 10,
- LinesUncompressed: 20,
- BytesDecompressed: 40,
- LinesDecompressed: 20,
- BytesCompressed: 30,
+ HeadChunkBytes: 10,
+ HeadChunkLines: 20,
+ DecompressedBytes: 40,
+ DecompressedLines: 20,
+ CompressedBytes: 30,
TotalDuplicates: 10,
},
},
@@ -75,11 +75,11 @@ func fakeIngesterQuery(ctx context.Context) {
meta := d.addTrailer()

c, _ := jsoniter.MarshalToString(ChunkData{
- BytesUncompressed: 5,
- LinesUncompressed: 10,
- BytesDecompressed: 12,
- LinesDecompressed: 20,
- BytesCompressed: 30,
+ HeadChunkBytes: 5,
+ HeadChunkLines: 10,
+ DecompressedBytes: 12,
+ DecompressedLines: 20,
+ CompressedBytes: 30,
TotalDuplicates: 1,
})
meta.Set(chunkDataKey, c)
10 changes: 5 additions & 5 deletions pkg/logql/stats/grpc.go
@@ -85,11 +85,11 @@ func decodeTrailers(ctx context.Context) Ingester {
res.TotalChunksMatched += ing.TotalChunksMatched
res.TotalBatches += ing.TotalBatches
res.TotalLinesSent += ing.TotalLinesSent
- res.BytesUncompressed += ing.BytesUncompressed
- res.LinesUncompressed += ing.LinesUncompressed
- res.BytesDecompressed += ing.BytesDecompressed
- res.LinesDecompressed += ing.LinesDecompressed
- res.BytesCompressed += ing.BytesCompressed
+ res.HeadChunkBytes += ing.HeadChunkBytes
+ res.HeadChunkLines += ing.HeadChunkLines
+ res.DecompressedBytes += ing.DecompressedBytes
+ res.DecompressedLines += ing.DecompressedLines
+ res.CompressedBytes += ing.CompressedBytes
res.TotalDuplicates += ing.TotalDuplicates
}
return res
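
decodeTrailers above merges the stats that each queried ingester reports in its gRPC trailer by summing field by field; that is why the test below expects every counter to equal 2 after two ingesters report 1 each. A stand-alone sketch of that merge, assuming a trimmed stand-in type:

package main

import "fmt"

// ingesterStats is a trimmed stand-in for the Ingester stats type.
type ingesterStats struct {
	HeadChunkBytes    int64
	HeadChunkLines    int64
	DecompressedBytes int64
	DecompressedLines int64
	CompressedBytes   int64
}

// merge sums per-ingester stats field by field, as decodeTrailers does above.
func merge(parts []ingesterStats) ingesterStats {
	var res ingesterStats
	for _, ing := range parts {
		res.HeadChunkBytes += ing.HeadChunkBytes
		res.HeadChunkLines += ing.HeadChunkLines
		res.DecompressedBytes += ing.DecompressedBytes
		res.DecompressedLines += ing.DecompressedLines
		res.CompressedBytes += ing.CompressedBytes
	}
	return res
}

func main() {
	one := ingesterStats{1, 1, 1, 1, 1}
	fmt.Printf("%+v\n", merge([]ingesterStats{one, one})) // every field is 2
}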
20 changes: 10 additions & 10 deletions pkg/logql/stats/grpc_test.go
@@ -40,11 +40,11 @@ func TestCollectTrailer(t *testing.T) {
GetIngesterData(ingCtx).TotalChunksMatched++
GetIngesterData(ingCtx).TotalBatches = +2
GetIngesterData(ingCtx).TotalLinesSent = +3
- GetChunkData(ingCtx).BytesUncompressed++
- GetChunkData(ingCtx).LinesUncompressed++
- GetChunkData(ingCtx).BytesDecompressed++
- GetChunkData(ingCtx).LinesDecompressed++
- GetChunkData(ingCtx).BytesCompressed++
+ GetChunkData(ingCtx).HeadChunkBytes++
+ GetChunkData(ingCtx).HeadChunkLines++
+ GetChunkData(ingCtx).DecompressedBytes++
+ GetChunkData(ingCtx).DecompressedLines++
+ GetChunkData(ingCtx).CompressedBytes++
GetChunkData(ingCtx).TotalDuplicates++
return nil
})
@@ -85,11 +85,11 @@ func TestCollectTrailer(t *testing.T) {
require.Equal(t, int64(2), res.TotalChunksMatched)
require.Equal(t, int64(4), res.TotalBatches)
require.Equal(t, int64(6), res.TotalLinesSent)
- require.Equal(t, int64(2), res.BytesUncompressed)
- require.Equal(t, int64(2), res.LinesUncompressed)
- require.Equal(t, int64(2), res.BytesDecompressed)
- require.Equal(t, int64(2), res.LinesDecompressed)
- require.Equal(t, int64(2), res.BytesCompressed)
+ require.Equal(t, int64(2), res.HeadChunkBytes)
+ require.Equal(t, int64(2), res.HeadChunkLines)
+ require.Equal(t, int64(2), res.DecompressedBytes)
+ require.Equal(t, int64(2), res.DecompressedLines)
+ require.Equal(t, int64(2), res.CompressedBytes)
require.Equal(t, int64(2), res.TotalDuplicates)
}

