Skip to content

Commit

Permalink
chore: Enable restart and reconnect tests in CI under a different Tag (#10604)
Browse files Browse the repository at this point in the history

Signed-off-by: Neeharika-Sompalli <neeharika.sompalli@swirldslabs.com>
Signed-off-by: Nathan Klick <nathan@swirldslabs.com>
Co-authored-by: Jeffrey Tang <jeffrey@swirldslabs.com>
Co-authored-by: Nathan Klick <nathan@swirldslabs.com>
  • Loading branch information
3 people authored Jan 5, 2024
1 parent 0e58723 commit 18d169b
Show file tree
Hide file tree
Showing 15 changed files with 248 additions and 41 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/node-flow-build-application.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,8 @@ jobs:
enable-hapi-tests-token: ${{ github.event_name == 'push' || github.event.inputs.enable-hapi-tests == 'true' }}
enable-hapi-tests-smart-contract: ${{ github.event_name == 'push' || github.event.inputs.enable-hapi-tests == 'true' }}
enable-hapi-tests-time-consuming: ${{ github.event_name == 'push' || github.event.inputs.enable-hapi-tests == 'true' }}
enable-hapi-tests-restart: ${{ github.event_name == 'push' || github.event.inputs.enable-hapi-tests == 'true' }}
enable-hapi-tests-nd-reconnect: ${{ github.event_name == 'push' || github.event.inputs.enable-hapi-tests == 'true' }}
enable-e2e-tests: ${{ github.event_name == 'push' || github.event.inputs.enable-e2e-tests == 'true' }}
enable-spotless-check: ${{ github.event.inputs.enable-spotless-check == 'true' }}
enable-snyk-scan: ${{ github.event_name == 'push' || github.event.inputs.enable-snyk-scan == 'true' }}
Expand Down
40 changes: 40 additions & 0 deletions .github/workflows/node-flow-pull-request-checks.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -228,6 +228,46 @@ jobs:
gradle-cache-password: ${{ secrets.GRADLE_CACHE_PASSWORD }}
sonar-token: ${{ secrets.SONAR_TOKEN }}

  # Runs the HAPI test suites tagged RESTART as a dedicated PR-check job,
  # delegating to the shared compile/test reusable workflow. All other test
  # categories are disabled so this job exercises only the restart suites.
  hapi-tests-restart:
    name: HAPI Tests (Restart)
    uses: ./.github/workflows/node-zxc-compile-application-code.yaml
    needs:
      - dependency-check
      - spotless
    with:
      custom-job-label: Standard
      enable-unit-tests: false
      enable-e2e-tests: false
      enable-integration-tests: false
      enable-hapi-tests-restart: true
      enable-sonar-analysis: false
      # Capture node network logs as artifacts for post-mortem on failures.
      enable-network-log-capture: true
    secrets:
      access-token: ${{ secrets.GITHUB_TOKEN }}
      gradle-cache-username: ${{ secrets.GRADLE_CACHE_USERNAME }}
      gradle-cache-password: ${{ secrets.GRADLE_CACHE_PASSWORD }}
      sonar-token: ${{ secrets.SONAR_TOKEN }}

  # Runs the HAPI test suites tagged ND_RECONNECT (node-death reconnect) as a
  # dedicated PR-check job, delegating to the shared compile/test reusable
  # workflow. All other test categories are disabled for this job.
  hapi-tests-nd-reconnect:
    name: HAPI Tests (Node Death Reconnect)
    uses: ./.github/workflows/node-zxc-compile-application-code.yaml
    needs:
      - dependency-check
      - spotless
    with:
      custom-job-label: Standard
      enable-unit-tests: false
      enable-e2e-tests: false
      enable-integration-tests: false
      enable-hapi-tests-nd-reconnect: true
      enable-sonar-analysis: false
      # Capture node network logs as artifacts for post-mortem on failures.
      enable-network-log-capture: true
    secrets:
      access-token: ${{ secrets.GITHUB_TOKEN }}
      gradle-cache-username: ${{ secrets.GRADLE_CACHE_USERNAME }}
      gradle-cache-password: ${{ secrets.GRADLE_CACHE_PASSWORD }}
      sonar-token: ${{ secrets.SONAR_TOKEN }}

abbreviated-panel:
name: JRS Panel
uses: ./.github/workflows/zxc-jrs-regression.yaml
Expand Down
62 changes: 62 additions & 0 deletions .github/workflows/node-zxc-compile-application-code.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,16 @@ on:
type: boolean
required: false
default: false
enable-hapi-tests-restart:
description: "HAPI Testing (restart) Enabled"
type: boolean
required: false
default: false
enable-hapi-tests-nd-reconnect:
description: "HAPI Testing (node death reconnect) Enabled"
type: boolean
required: false
default: false
enable-sonar-analysis:
description: "Sonar Analysis Enabled"
type: boolean
Expand Down Expand Up @@ -386,6 +396,58 @@ jobs:
path: |
hedera-node/test-clients/build/hapi-test/**/output/**
- name: HAPI Testing (Restart)
id: gradle-hapi-restart
if: ${{ inputs.enable-hapi-tests-restart && steps.gradle-build.conclusion == 'success' && !cancelled() }}
env:
LC_ALL: en.UTF-8
LANG: en_US.UTF-8
run: ${GRADLE_EXEC} hapiTestRestart -Dfile.encoding=UTF-8 --scan --no-daemon

- name: Publish HAPI Test (Restart) Report
uses: actionite/publish-unit-test-result-action@1e01e49081c6c4073913aa4b7980fa83e709f322 # v2.3.0
if: ${{ inputs.enable-hapi-tests-restart && steps.gradle-build.conclusion == 'success' && !cancelled() }}
with:
check_name: 'Node: HAPI Test (Restart) Results'
check_run_disabled: false
json_thousands_separator: ','
junit_files: "**/build/test-results/hapiTestRestart/TEST-*.xml"

- name: Publish HAPI Test (Restart) Network Logs
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3
if: ${{ inputs.enable-hapi-tests-restart && inputs.enable-network-log-capture && !cancelled() }}
with:
name: HAPI Test (Restart) Network Logs
path: |
hedera-node/test-clients/build/hapi-test/**/output/**
hedera-node/test-clients/build/hapi-test/*.log
- name: HAPI Testing (Node Death Reconnect)
id: gradle-hapi-nd-reconnect
if: ${{ inputs.enable-hapi-tests-nd-reconnect && steps.gradle-build.conclusion == 'success' && !cancelled() }}
env:
LC_ALL: en.UTF-8
LANG: en_US.UTF-8
run: ${GRADLE_EXEC} hapiTestNDReconnect -Dfile.encoding=UTF-8 --scan --no-daemon

- name: Publish HAPI Test (Node Death Reconnect) Report
uses: actionite/publish-unit-test-result-action@1e01e49081c6c4073913aa4b7980fa83e709f322 # v2.3.0
if: ${{ inputs.enable-hapi-tests-nd-reconnect && steps.gradle-build.conclusion == 'success' && !cancelled() }}
with:
check_name: 'Node: HAPI Test (Node Death Reconnect) Results'
check_run_disabled: false
json_thousands_separator: ','
junit_files: "**/build/test-results/hapiTestNDReconnect/TEST-*.xml"

- name: Publish HAPI Test (Node Death Reconnect) Network Logs
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3
if: ${{ inputs.enable-hapi-tests-nd-reconnect && inputs.enable-network-log-capture && !cancelled() }}
with:
name: HAPI Test (Node Death Reconnect) Network Logs
path: |
hedera-node/test-clients/build/hapi-test/**/output/**
hedera-node/test-clients/build/hapi-test/*.log
- name: E2E Testing
id: gradle-eet
if: ${{ inputs.enable-e2e-tests && steps.gradle-build.conclusion == 'success' && !cancelled() }}
Expand Down
2 changes: 2 additions & 0 deletions hedera-node/configuration/dev/application.properties
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,5 @@ contracts.chainId=298
staking.periodMins=1
staking.fees.nodeRewardPercentage=10
staking.fees.stakingRewardPercentage=10
# Needed for Restart and Reconnect HapiTests that run many transactions of each type
bootstrap.throttleDefsJson.resource=throttles-dev.json
Original file line number Diff line number Diff line change
Expand Up @@ -256,6 +256,10 @@ private MerkleHederaState(@NonNull final MerkleHederaState from) {
// **MOVE** over the onInit handler. Don't leave it on the immutable state
this.onInit = from.onInit;
from.onInit = null;

// **MOVE** over the onUpdateWeight handler. Don't leave it on the immutable state
this.onUpdateWeight = from.onUpdateWeight;
from.onUpdateWeight = null;
}

@Override
Expand Down
40 changes: 39 additions & 1 deletion hedera-node/test-clients/build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,16 @@ tasks.register<Test>("hapiTestMisc") {
testClassesDirs = sourceSets.main.get().output.classesDirs
classpath = sourceSets.main.get().runtimeClasspath

useJUnitPlatform { excludeTags("CRYPTO", "TOKEN", "SMART_CONTRACT", "TIME_CONSUMING") }
useJUnitPlatform {
excludeTags(
"CRYPTO",
"TOKEN",
"SMART_CONTRACT",
"TIME_CONSUMING",
"RESTART",
"ND_RECONNECT"
)
}

// Limit heap and number of processors
maxHeapSize = "8g"
Expand Down Expand Up @@ -147,6 +156,35 @@ tasks.register<Test>("hapiTestTimeConsuming") {
modularity.inferModulePath.set(false)
}

// Runs the test-suites tagged RESTART, which exercise node restart scenarios.
// (The previous comment about "extremely time-consuming (10+ minutes)" suites
// belonged to hapiTestTimeConsuming and did not describe this task.)
tasks.register<Test>("hapiTestRestart") {
    testClassesDirs = sourceSets.main.get().output.classesDirs
    classpath = sourceSets.main.get().runtimeClasspath

    useJUnitPlatform { includeTags("RESTART") }

    // Limit heap and number of processors
    maxHeapSize = "8g"
    jvmArgs("-XX:ActiveProcessorCount=6")

    // Do not yet run things on the '--module-path'
    modularity.inferModulePath.set(false)
}

// Runs the test-suites tagged ND_RECONNECT, which exercise node-death
// reconnect scenarios.
tasks.register<Test>("hapiTestNDReconnect") {
    testClassesDirs = sourceSets.main.get().output.classesDirs
    classpath = sourceSets.main.get().runtimeClasspath

    useJUnitPlatform { includeTags("ND_RECONNECT") }

    // Limit heap and number of processors
    maxHeapSize = "8g"
    jvmArgs("-XX:ActiveProcessorCount=6")

    // Do not yet run things on the '--module-path'
    modularity.inferModulePath.set(false)
}

tasks.test {
// Disable these EET tests from being executed as part of the gradle "test" task.
// We should maybe remove them from src/test into src/eet,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,11 @@
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.TimeoutException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class HapiTestEnv {
private static final Logger logger = LogManager.getLogger(HapiTestEnv.class);
private static final String[] NODE_NAMES = new String[] {"Alice", "Bob", "Carol", "Dave"};
private static final int FIRST_GOSSIP_PORT = 60000;
private static final int FIRST_GOSSIP_TLS_PORT = 60001;
Expand Down Expand Up @@ -103,10 +106,23 @@ enum HapiTestNodesType {
public void start() throws TimeoutException {
started = true;
for (final var node : nodes) {
node.start();
logger.info("Starting node {}", node.getName());
try {
node.start();
} catch (RuntimeException e) {
logger.error(
"Node {} failed to start within {} seconds", node.getName(), CAPTIVE_NODE_STARTUP_TIME_LIMIT);
throw e;
}
}
for (final var node : nodes) {
node.waitForActive(CAPTIVE_NODE_STARTUP_TIME_LIMIT);
try {
node.waitForActive(CAPTIVE_NODE_STARTUP_TIME_LIMIT);
} catch (TimeoutException e) {
logger.error(
"Node {} failed to ACTIVE within {} seconds", node.getName(), CAPTIVE_NODE_STARTUP_TIME_LIMIT);
throw e;
}
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -188,8 +188,8 @@ public void start() {
Long.toString(nodeId))
.directory(workingDir.toFile())
.redirectOutput(stdout.toFile())
.redirectError(stderr.toFile());

.redirectError(stderr.toFile())
.inheritIO();
handle = builder.start().toHandle();
} catch (Exception e) {
throw new RuntimeException("node " + nodeId + ": Unable to start!", e);
Expand Down Expand Up @@ -453,7 +453,6 @@ private String getPlatformStatus() {
}
} catch (IOException | InterruptedException ignored) {
}

return statusMap.get(statusKey);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -26,4 +26,6 @@ private TestTags() {
public static final String SMART_CONTRACT = "SMART_CONTRACT";
public static final String TIME_CONSUMING = "TIME_CONSUMING";
public static final String TOKEN = "TOKEN";
public static final String RESTART = "RESTART";
public static final String ND_RECONNECT = "ND_RECONNECT";
}
Original file line number Diff line number Diff line change
Original file line number	Diff line number	Diff line change
Expand Up @@ -25,4 +25,9 @@ public abstract class UtilOp extends HapiSpecOperation {
protected long feeFor(HapiSpec spec, Transaction txn, int numPayerSigs) throws Throwable {
return 0;
}

    /**
     * Enables verbose logging for this operation and returns {@code this} so
     * the call can be chained fluently when building a spec.
     */
    public UtilOp logged() {
        verboseLoggingOn = true;
        return this;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,13 @@ public StartNodesOp(@NonNull final NodeSelector selector) {
@Override
protected boolean run(@NonNull final HapiTestNode node) {
logger.info("Starting node {}...", node);
node.start();
try {
node.start();
} catch (Exception e) {
logger.error("Node {} failed to start", node);
throw e;
}
logger.info("Node {} has started...", node);
return false;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -41,10 +41,9 @@ public WaitForShutdownOp(@NonNull final NodeSelector selector, int waitSeconds)

@Override
protected boolean run(@NonNull final HapiTestNode node) {
logger.info("Waiting for node {} to shut down, waiting up to {}s...", node, waitSeconds);
try {
node.waitForShutdown(waitSeconds);
logger.info("Node {} is shut down", node);
logger.info("Waiting for node {} to shut down, waiting up to {}s...", node, waitSeconds);
return false; // Do not stop the test, all is well.
} catch (TimeoutException e) {
logger.info("Node {} did not shut down within {}s", node, waitSeconds);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.inParallel;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sleepFor;
import static com.hedera.services.bdd.suites.HapiSuite.APP_PROPERTIES;
import static com.hedera.services.bdd.suites.HapiSuite.DEFAULT_PAYER;
import static com.hedera.services.bdd.suites.HapiSuite.GENESIS;
import static com.hedera.services.bdd.suites.HapiSuite.THREE_MONTHS_IN_SECONDS;
import static com.hedera.services.bdd.suites.token.TokenTransactSpecs.SUPPLY_KEY;
Expand Down Expand Up @@ -56,6 +55,7 @@ public class MixedOperations {
static final String RECEIVER = "receiver";
static final String TOPIC = "topic";
static final String TREASURY = "treasury";
static final String PAYER = "payer";
final int numSubmissions;

public MixedOperations(int numSubmissions) {
Expand All @@ -69,8 +69,9 @@ Supplier<HapiSpecOperation[]> mixedOps(
fileUpdate(APP_PROPERTIES).payingWith(GENESIS).overridingProps(Map.of("tokens.maxPerAccount", "10000000")),
inParallel(IntStream.range(0, 2 * numSubmissions)
.mapToObj(ignore -> cryptoTransfer(tinyBarsFromTo(SENDER, RECEIVER, 1L))
.payingWith(PAYER)
.logging()
.signedBy(SENDER, DEFAULT_PAYER))
.signedBy(SENDER, PAYER))
.toArray(HapiSpecOperation[]::new)),
sleepFor(10000),
inParallel(IntStream.range(0, numSubmissions)
Expand All @@ -83,11 +84,15 @@ Supplier<HapiSpecOperation[]> mixedOps(
.decimals(1)
.adminKey(ADMIN_KEY)
.supplyKey(SUPPLY_KEY)
.payingWith(PAYER)
.logging())
.toArray(HapiSpecOperation[]::new)),
sleepFor(10000),
inParallel(IntStream.range(0, numSubmissions)
.mapToObj(i -> tokenAssociate(SENDER, TOKEN + i).logging().signedBy(SENDER, DEFAULT_PAYER))
.mapToObj(i -> tokenAssociate(SENDER, TOKEN + i)
.payingWith(PAYER)
.logging()
.signedBy(SENDER, PAYER))
.toArray(HapiSpecOperation[]::new)),
sleepFor(10000),
submitMessageTo(TOPIC)
Expand All @@ -103,7 +108,8 @@ Supplier<HapiSpecOperation[]> mixedOps(
.mapToObj(ignore -> scheduleCreate(
"schedule" + scheduleId.incrementAndGet(),
cryptoTransfer(tinyBarsFromTo(SENDER, RECEIVER, r.nextInt(100000000))))
.signedBy(SENDER, DEFAULT_PAYER)
.payingWith(PAYER)
.signedBy(SENDER, PAYER)
.adminKey(SENDER)
.logging())
.toArray(HapiSpecOperation[]::new)),
Expand Down
Loading

0 comments on commit 18d169b

Please sign in to comment.