Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[testbed] - Add scenarios to handle large files #34417

Merged
merged 15 commits into from
Sep 3, 2024
Prev Previous commit
fix: add comments
  • Loading branch information
VihasMakwana committed Sep 3, 2024
commit d550f7abe55828252a0cd9d22ccbb9eda4fc2135
21 changes: 10 additions & 11 deletions testbed/tests/log_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -250,6 +250,11 @@ func TestLogLargeFiles(t *testing.T) {
sleepSeconds int
}{
{
/*
 * The FileLogWriter generates strings of almost 100 bytes each.
 * With a rate of 200,000 lines per second over a duration of 100 seconds,
 * this results in a file size of approximately 2GB over its lifetime.
 */
name: "filelog-largefiles-2Gb-lifetime",
sender: datasenders.NewFileLogWriter(),
receiver: testbed.NewOTLPDataReceiver(testutil.GetAvailablePort(t)),
Expand All @@ -261,6 +266,11 @@ func TestLogLargeFiles(t *testing.T) {
sleepSeconds: 100,
},
{
/*
 * The FileLogWriter generates strings of almost 100 bytes each.
 * With a rate of 330,000 lines per second over a duration of 200 seconds,
 * this results in a file size of approximately 6.6GB over its lifetime.
 */
name: "filelog-largefiles-6GB-lifetime",
sender: datasenders.NewFileLogWriter(),
receiver: testbed.NewOTLPDataReceiver(testutil.GetAvailablePort(t)),
Expand All @@ -271,17 +281,6 @@ func TestLogLargeFiles(t *testing.T) {
},
sleepSeconds: 200,
},
{
name: "filelog-largefiles-50MB/sec",
sender: datasenders.NewFileLogWriter(),
receiver: testbed.NewOTLPDataReceiver(testutil.GetAvailablePort(t)),
loadOptions: testbed.LoadOptions{
DataItemsPerSecond: 400000,
ItemsPerBatch: 100,
Parallel: 1,
},
sleepSeconds: 100,
},
}
processors := map[string]string{
"batch": `
Expand Down