Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions .github/workflows/ci-integration-review.yml
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ jobs:
with:
timeout_minutes: 30
max_attempts: 3
command: bash scripts/run-integration-test.sh int ${{ matrix.service }}
command: cargo vdev integration run ${{ matrix.service }} --build-all --reuse-image

e2e-tests:
needs: prep-pr
Expand All @@ -134,7 +134,7 @@ jobs:
with:
timeout_minutes: 35
max_attempts: 3
command: bash scripts/run-integration-test.sh e2e datadog-logs
command: cargo vdev e2e run datadog-logs --build-all --reuse-image

- name: datadog-e2e-metrics
if: ${{ startsWith(github.event.review.body, '/ci-run-e2e-datadog-metrics')
Expand All @@ -144,13 +144,13 @@ jobs:
with:
timeout_minutes: 35
max_attempts: 3
command: bash scripts/run-integration-test.sh e2e datadog-metrics
command: cargo vdev e2e run datadog-metrics --build-all --reuse-image

- name: e2e-opentelemetry-logs
if: ${{ startsWith(github.event.review.body, '/ci-run-e2e-opentelemetry-logs')
|| startsWith(github.event.review.body, '/ci-run-e2e-all')
|| startsWith(github.event.review.body, '/ci-run-all') }}
run: bash scripts/run-integration-test.sh e2e opentelemetry-logs
run: cargo vdev e2e run opentelemetry-logs --build-all --reuse-image

update-pr-status:
name: Signal result to PR
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/integration.yml
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ jobs:
# Only install dep if test runs
bash scripts/environment/prepare.sh --modules=datadog-ci
echo "Running test for ${{ matrix.service }}"
bash scripts/run-integration-test.sh int ${{ matrix.service }}
cargo vdev integration run ${{ matrix.service }} --build-all --reuse-image
else
echo "Skipping ${{ matrix.service }} test as the value is false or conditions not met."
fi
Expand Down Expand Up @@ -153,7 +153,7 @@ jobs:
# Only install dep if test runs
bash scripts/environment/prepare.sh --modules=datadog-ci
echo "Running test for ${{ matrix.service }}"
bash scripts/run-integration-test.sh e2e ${{ matrix.service }}
cargo vdev e2e run ${{ matrix.service }} --build-all --reuse-image
else
echo "Skipping ${{ matrix.service }} test as the value is false or conditions not met."
fi
Expand Down
2 changes: 1 addition & 1 deletion scripts/e2e/opentelemetry-logs/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ This end-to-end (E2E) test validates that log events generated in a container ar

```shell
# from the repo root directory
./scripts/run-integration-test.sh e2e opentelemetry-logs
cargo vdev e2e run opentelemetry-logs
```

## Notes
Expand Down
155 changes: 0 additions & 155 deletions scripts/run-integration-test.sh

This file was deleted.

1 change: 1 addition & 0 deletions vdev/src/commands/compose_tests/mod.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
// Subcommand implementations shared by the `integration` and `e2e`
// compose-test command trees.
mod active_projects;

pub(crate) mod ci_paths;
pub(crate) mod run;
pub(crate) mod show;
pub(crate) mod start;
pub(crate) mod stop;
Expand Down
165 changes: 165 additions & 0 deletions vdev/src/commands/compose_tests/run.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,165 @@
use anyhow::{Context, Result};
use std::process::Command;

use crate::testing::{config::ComposeTestConfig, integration::ComposeTestLocalConfig};

/// Run a complete test workflow orchestrating start, test, and stop phases.
///
/// This function implements the full test lifecycle used in CI:
/// 1. Clean up previous test output
/// 2. Start the environment
/// 3. Run tests with retries
/// 4. Upload results to Datadog (in CI)
/// 5. Stop the environment (always, as cleanup)
///
/// # Arguments
/// * `local_config` - compose-test configuration (selects the test directory, etc.)
/// * `test_name` - name of the compose test to run
/// * `environments` - environments to run; empty means "auto-discover all"
/// * `build_all`, `reuse_image` - forwarded to the start/test/stop phases
/// * `retries` - test retry count, forwarded to the test phase
/// * `show_logs` - force dumping `docker compose` logs on any failure
///
/// # Errors
/// Returns the first start-phase or test-phase error; the loop over
/// environments stops at the first failure (after stopping that environment).
pub fn exec(
    local_config: ComposeTestLocalConfig,
    test_name: &str,
    environments: &[String],
    build_all: bool,
    reuse_image: bool,
    retries: u8,
    show_logs: bool,
) -> Result<()> {
    // Resolve the environment list: explicit list wins, otherwise discover
    // every environment declared in the test's compose config.
    let environments = if environments.is_empty() {
        // Auto-discover environments
        let (_test_dir, config) = ComposeTestConfig::load(local_config.directory, test_name)?;
        config.environments().keys().cloned().collect()
    } else {
        environments.to_vec()
    };

    // A test with zero environments cannot run anything — fail loudly rather
    // than silently succeeding.
    if environments.is_empty() {
        anyhow::bail!("No environments found for test '{test_name}'");
    }

    for environment in &environments {
        info!("Running test '{test_name}' in environment '{environment}'");

        // Clean up previous test output so stale results can't be uploaded.
        cleanup_test_output(test_name)?;

        // Start the environment. The result is kept (not `?`-propagated) so
        // we can still run the stop phase below as cleanup.
        let start_result = super::start::exec(
            local_config,
            test_name,
            Some(environment),
            build_all,
            reuse_image,
        );

        if let Err(e) = &start_result {
            error!("Failed to start environment: {e}");
            if show_logs || is_debug_mode() {
                // NOTE(review): assumes the docker compose project name equals
                // the test name — confirm against the start phase's naming.
                print_compose_logs(test_name);
            }
        }

        let test_result = if start_result.is_ok() {
            // Run tests (with `retries` attempts and no extra test args).
            let result = super::test::exec(
                local_config,
                test_name,
                Some(environment),
                build_all,
                reuse_image,
                retries,
                &[],
            );

            if let Err(e) = &result {
                error!("Tests failed: {e}");
                if show_logs || is_debug_mode() {
                    print_compose_logs(test_name);
                }
            }

            // Upload test results (only in CI; the script no-ops locally).
            // Runs on both pass and fail so CI records failed runs too.
            upload_test_results();

            result
        } else {
            warn!("Skipping test phase because 'start' failed");
            start_result
        };

        // Always stop the environment (best effort cleanup) — a stop failure
        // is only warned about so it can't mask the real test result.
        if let Err(e) = super::stop::exec(local_config, test_name, build_all, reuse_image) {
            warn!("Failed to stop environment (cleanup): {e}");
        }

        // Exit early on first failure
        test_result?;
    }

    Ok(())
}

/// Whether verbose debug output was requested, either via the GitHub Actions
/// debug flag (`ACTIONS_RUNNER_DEBUG=true`) or a `RUST_LOG` value containing
/// `debug` or `trace`.
fn is_debug_mode() -> bool {
    let actions_debug =
        matches!(std::env::var("ACTIONS_RUNNER_DEBUG").as_deref(), Ok("true"));
    let rust_log_debug = std::env::var("RUST_LOG")
        .map_or(false, |level| level.contains("debug") || level.contains("trace"));
    actions_debug || rust_log_debug
}

/// Dump `docker compose logs` for the given compose project to aid debugging.
///
/// Best-effort: only a failure to spawn `docker` is reported (as a warning);
/// a non-zero exit status from the command itself is not checked.
fn print_compose_logs(project_name: &str) {
    info!("Collecting docker compose logs for project '{project_name}'...");

    if let Err(e) = Command::new("docker")
        .args(["compose", "--project-name", project_name, "logs"])
        .status()
    {
        warn!("Failed to collect logs: {e}");
    }
}

/// Remove output left over from a previous run of `test_name`.
///
/// Spawns a throwaway Alpine container with the shared `vector_target` docker
/// volume mounted and deletes the test's output files. A non-zero exit from
/// the cleanup is only a warning, since there may simply be nothing to remove;
/// an error is returned only if the `docker` command itself cannot be run.
///
/// NOTE(review): the volume is mounted AT `/output/{test_name}`, so the
/// `rm -rf /output/{test_name}/*` clears the root of the `vector_target`
/// volume — confirm this is intended if the volume holds output for more
/// than one test.
fn cleanup_test_output(test_name: &str) -> Result<()> {
    debug!("Cleaning up previous test output for '{test_name}'");

    let mount = format!("vector_target:/output/{test_name}");
    let remove = format!("rm -rf /output/{test_name}/*");

    let status = Command::new("docker")
        .args(["run", "--rm", "-v", &mount, "alpine:3.20", "sh", "-c", &remove])
        .status()
        .context("Failed to run docker cleanup command")?;

    if !status.success() {
        warn!("Failed to clean up previous test output (this may be okay if it didn't exist)");
    }

    Ok(())
}

/// Upload test results to Datadog (in CI only, no-op locally).
///
/// Delegates entirely to `scripts/upload-test-results.sh`, which itself checks
/// for a CI environment. Failures are only warned about — uploading results
/// must never fail the test run.
fn upload_test_results() {
    // Resolve the upload script relative to the repo root.
    let script_path =
        std::path::PathBuf::from(crate::app::path()).join("scripts/upload-test-results.sh");

    // Call the upload script (it checks for CI internally).
    match Command::new(&script_path).status() {
        // Script ran but reported failure.
        Ok(status) if !status.success() => warn!("Upload script exited with non-zero status"),
        // Script could not be spawned at all.
        Err(e) => warn!("Failed to execute upload script: {e}"),
        // Success, or a condition the script handled itself.
        Ok(_) => {}
    }
}
1 change: 1 addition & 0 deletions vdev/src/commands/e2e/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,5 +9,6 @@ These test setups are organized into a set of integrations, located in subdirect
mod start,
mod stop,
mod test,
mod run,
mod ci_paths,
}
Loading
Loading