Skip to content
Closed
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 16 additions & 1 deletion test/functional/test_runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from collections import deque
import configparser
import datetime
import itertools
import os
import time
import shutil
Expand Down Expand Up @@ -546,9 +547,10 @@ def main():
failfast=args.failfast,
use_term_control=args.ansi,
skipunit=args.skipunit,
ci=args.ci,
)

def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, attempts=1, enable_coverage=False, args=None, combined_logs_len=0, failfast=False, use_term_control, skipunit=False):
def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, attempts=1, enable_coverage=False, args=None, combined_logs_len=0, failfast=False, use_term_control, skipunit=False, ci=False):
args = args or []

# Warn if dashd is already running
Expand Down Expand Up @@ -621,6 +623,19 @@ def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, attempts=1, enab
done_str = f"{len(test_results)}/{test_count} - {BOLD[1]}{test_result.name}{BOLD[0]}"
if test_result.status == "Passed":
logging.debug("%s passed, Duration: %s s" % (done_str, test_result.time))
# Remove blocks folder from test datadir to free up CI disk space
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why that doesn't happen with bitcoin? they have no cleanup mechanism on CI

Btw, I just noticed that all these block files are not sparse files, but regular files filled with zeroes.
So, they are compressed well when CI is done; but during a run they are eating disk space.

/** The pre-allocation chunk size for blk?????.dat files (since 0.8) */                              
static const unsigned int BLOCKFILE_CHUNK_SIZE = 0x1000000; // 16 MiB                                

3 nodes for each test - 48MiB used; 300 tests -> 10+ Gb of zeroes (probably even more)

I guess this fix is going to work anyway; but is it a bug or expected behaviour that blk files are not sparse?

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

$ test/functional/rpc_help.py
<succeed>
$ find /tmp/dash_func_test_5coohz_8 -ls | grep blk 
   436673  16384 -rw-------   1 knst     knst     16777216 Nov 17 02:08 /tmp/dash_func_test_5coohz_8/node0/regtest/blocks/blk00000.dat
$ du -h /tmp/dash_func_test_5coohz_8/node0/regtest/blocks/blk00000.dat
16M     /tmp/dash_func_test_5coohz_8/node0/regtest/blocks/blk00000.dat

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

diff --git a/src/node/blockstorage.h b/src/node/blockstorage.h
index b627c26162..46e253b8aa 100644
--- a/src/node/blockstorage.h
+++ b/src/node/blockstorage.h
@@ -39,7 +39,7 @@ static constexpr bool DEFAULT_STOPAFTERBLOCKIMPORT{false};
 static constexpr bool DEFAULT_TIMESTAMPINDEX{false};
 
 /** The pre-allocation chunk size for blk?????.dat files (since 0.8) */
-static const unsigned int BLOCKFILE_CHUNK_SIZE = 0x1000000; // 16 MiB
+static const unsigned int BLOCKFILE_CHUNK_SIZE = 0x400000; // 4 MiB

What are possible downsides of this change?
I tried to run the functional tests locally and it seems IO becomes a smaller limiting factor: functional tests running as 30 parallel jobs (-j30) sped up from 195s to just 180s.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Interesting... there is also an option -fastprune Use smaller block files and lower minimum prune height for testing purposes, so block file chunks are just 16kb each.

Copy link
Collaborator

@knst knst Nov 16, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

pruning will probably break many functional tests, including governance-related ones, blockfilter, etc.

But we could add one more param here to use small blocks, something like:

    return FlatFileSeq(gArgs.GetBlocksDirPath(), "blk", 
        gArgs.GetBoolArg("-tinyblk", false) ? 0x10000 /* 64kB */
        : (gArgs.GetBoolArg("-fastprune", false) ? 0x4000 /* 16kb */ : BLOCKFILE_CHUNK_SIZE));

if ci and os.path.isdir(testdir):
for i in itertools.count():
blocksdir = f"{testdir}/node{i}/regtest/blocks"
if not os.path.isdir(blocksdir):
break
if os.path.islink(blocksdir):
# Skip symlinks to avoid breaking custom test setups
continue
try:
shutil.rmtree(blocksdir)
except (OSError, PermissionError) as e:
logging.debug(f"Failed to remove {blocksdir}: {e}")
elif test_result.status == "Skipped":
logging.debug(f"{done_str} skipped ({skip_reason})")
else:
Expand Down
Loading