diff --git a/CHANGELOG.md b/CHANGELOG.md index 02abc8a0487..9730af0fd5d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,8 @@ ### Additions and Improvements - Add `OperationTracer.tracePrepareTransaction`, where the sender account has not yet been altered[#6453](https://github.com/hyperledger/besu/pull/6453) +- Improve the high spec flag by limiting it to a few column families [#6354](https://github.com/hyperledger/besu/pull/6354) + ### Bug fixes - Fix the way an advertised host configured with `--p2p-host` is treated when communicating with the originator of a PING packet [#6225](https://github.com/hyperledger/besu/pull/6225) @@ -61,6 +63,7 @@ - Set Ethereum Classic mainnet activation block for Spiral network upgrade [#6267](https://github.com/hyperledger/besu/pull/6267) - Add custom genesis file name to config overview if specified [#6297](https://github.com/hyperledger/besu/pull/6297) - Update Gradle plugins and replace unmaintained License Gradle Plugin with the actively maintained Gradle License Report [#6275](https://github.com/hyperledger/besu/pull/6275) +- Optimize RocksDB WAL files, allowing for faster restart and a more linear disk space utilization [#6328](https://github.com/hyperledger/besu/pull/6328) ### Bug fixes - Hotfix for selfdestruct preimages on bonsai [#6359]((https://github.com/hyperledger/besu/pull/6359) diff --git a/ethereum/core/src/main/java/org/hyperledger/besu/ethereum/storage/keyvalue/KeyValueSegmentIdentifier.java b/ethereum/core/src/main/java/org/hyperledger/besu/ethereum/storage/keyvalue/KeyValueSegmentIdentifier.java index 9cce0230cdb..282c314157d 100644 --- a/ethereum/core/src/main/java/org/hyperledger/besu/ethereum/storage/keyvalue/KeyValueSegmentIdentifier.java +++ b/ethereum/core/src/main/java/org/hyperledger/besu/ethereum/storage/keyvalue/KeyValueSegmentIdentifier.java @@ -16,18 +16,21 @@ import org.hyperledger.besu.plugin.services.storage.SegmentIdentifier; +import java.nio.charset.StandardCharsets; + import
org.bouncycastle.util.Arrays; public enum KeyValueSegmentIdentifier implements SegmentIdentifier { - BLOCKCHAIN(new byte[] {1}, true), - WORLD_STATE(new byte[] {2}, new int[] {0, 1}), + DEFAULT("default".getBytes(StandardCharsets.UTF_8)), + BLOCKCHAIN(new byte[] {1}, true, true), + WORLD_STATE(new byte[] {2}, new int[] {0, 1}, false, true), PRIVATE_TRANSACTIONS(new byte[] {3}), PRIVATE_STATE(new byte[] {4}), PRUNING_STATE(new byte[] {5}, new int[] {0, 1}), - ACCOUNT_INFO_STATE(new byte[] {6}, new int[] {2}), + ACCOUNT_INFO_STATE(new byte[] {6}, new int[] {2}, false, true), CODE_STORAGE(new byte[] {7}, new int[] {2}), - ACCOUNT_STORAGE_STORAGE(new byte[] {8}, new int[] {2}), - TRIE_BRANCH_STORAGE(new byte[] {9}, new int[] {2}), + ACCOUNT_STORAGE_STORAGE(new byte[] {8}, new int[] {2}, false, true), + TRIE_BRANCH_STORAGE(new byte[] {9}, new int[] {2}, false, true), TRIE_LOG_STORAGE(new byte[] {10}, new int[] {2}), VARIABLES(new byte[] {11}), // formerly GOQUORUM_PRIVATE_WORLD_STATE @@ -45,24 +48,30 @@ public enum KeyValueSegmentIdentifier implements SegmentIdentifier { private final byte[] id; private final int[] versionList; private final boolean containsStaticData; + private final boolean eligibleToHighSpecFlag; KeyValueSegmentIdentifier(final byte[] id) { this(id, new int[] {0, 1, 2}); } - KeyValueSegmentIdentifier(final byte[] id, final boolean containsStaticData) { - this(id, new int[] {0, 1, 2}, containsStaticData); + KeyValueSegmentIdentifier( + final byte[] id, final boolean containsStaticData, final boolean eligibleToHighSpecFlag) { + this(id, new int[] {0, 1, 2}, containsStaticData, eligibleToHighSpecFlag); } KeyValueSegmentIdentifier(final byte[] id, final int[] versionList) { - this(id, versionList, false); + this(id, versionList, false, false); } KeyValueSegmentIdentifier( - final byte[] id, final int[] versionList, final boolean containsStaticData) { + final byte[] id, + final int[] versionList, + final boolean containsStaticData, + final boolean 
eligibleToHighSpecFlag) { this.id = id; this.versionList = versionList; this.containsStaticData = containsStaticData; + this.eligibleToHighSpecFlag = eligibleToHighSpecFlag; } @Override @@ -80,6 +89,11 @@ public boolean containsStaticData() { return containsStaticData; } + @Override + public boolean isEligibleToHighSpecFlag() { + return eligibleToHighSpecFlag; + } + @Override public boolean includeInDatabaseVersion(final int version) { return Arrays.contains(versionList, version); diff --git a/plugin-api/build.gradle b/plugin-api/build.gradle index 1dcfd00f7ee..c72ad934339 100644 --- a/plugin-api/build.gradle +++ b/plugin-api/build.gradle @@ -69,7 +69,7 @@ Calculated : ${currentHash} tasks.register('checkAPIChanges', FileStateChecker) { description = "Checks that the API for the Plugin-API project does not change without deliberate thought" files = sourceSets.main.allJava.files - knownHash = 'IGq+V3KaStHCRFkeK3KwPxJYKO4RX9YM1O4JYITk8S8=' + knownHash = 'ZsovOR0oPfomcLP4b+HjikWzM0Tx6sCwi68mf5qwZf4=' } check.dependsOn('checkAPIChanges') diff --git a/plugin-api/src/main/java/org/hyperledger/besu/plugin/services/storage/SegmentIdentifier.java b/plugin-api/src/main/java/org/hyperledger/besu/plugin/services/storage/SegmentIdentifier.java index c535966876f..5db7fea5091 100644 --- a/plugin-api/src/main/java/org/hyperledger/besu/plugin/services/storage/SegmentIdentifier.java +++ b/plugin-api/src/main/java/org/hyperledger/besu/plugin/services/storage/SegmentIdentifier.java @@ -56,4 +56,12 @@ default boolean includeInDatabaseVersion(final int version) { * @return true if the segment contains only static data */ boolean containsStaticData(); + + /** + * Indicates whether this segment is eligible for the high spec optimization, i.e. whether its + * column family receives the larger block cache when the high spec flag is enabled + * + * @return true if the segment is involved with the high spec flag + */ + boolean isEligibleToHighSpecFlag(); } diff --git
a/plugins/rocksdb/src/main/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/RocksDBColumnarKeyValueStorage.java b/plugins/rocksdb/src/main/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/RocksDBColumnarKeyValueStorage.java index c6157217e3d..a7192cc3b51 100644 --- a/plugins/rocksdb/src/main/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/RocksDBColumnarKeyValueStorage.java +++ b/plugins/rocksdb/src/main/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/RocksDBColumnarKeyValueStorage.java @@ -70,18 +70,16 @@ public abstract class RocksDBColumnarKeyValueStorage implements SegmentedKeyValueStorage { private static final Logger LOG = LoggerFactory.getLogger(RocksDBColumnarKeyValueStorage.class); - static final String DEFAULT_COLUMN = "default"; private static final int ROCKSDB_FORMAT_VERSION = 5; private static final long ROCKSDB_BLOCK_SIZE = 32768; /** RocksDb blockcache size when using the high spec option */ protected static final long ROCKSDB_BLOCKCACHE_SIZE_HIGH_SPEC = 1_073_741_824L; /** RocksDb memtable size when using the high spec option */ - protected static final long ROCKSDB_MEMTABLE_SIZE_HIGH_SPEC = 1_073_741_824L; + protected static final long ROCKSDB_MEMTABLE_SIZE_HIGH_SPEC = 536_870_912L; /** Max total size of all WAL file, after which a flush is triggered */ protected static final long WAL_MAX_TOTAL_SIZE = 1_073_741_824L; /** Expected size of a single WAL file, to determine how many WAL files to keep around */ protected static final long EXPECTED_WAL_FILE_SIZE = 67_108_864L; - /** RocksDb number of log files to keep on disk */ private static final long NUMBER_OF_LOG_FILES_TO_KEEP = 7; /** RocksDb Time to roll a log file (1 day = 3600 * 24 seconds) */ @@ -144,7 +142,6 @@ public RocksDBColumnarKeyValueStorage( this.rocksDBMetricsFactory = rocksDBMetricsFactory; try { - final ColumnFamilyOptions columnFamilyOptions = new ColumnFamilyOptions(); trimmedSegments = new 
ArrayList<>(defaultSegments); final List existingColumnFamilies = RocksDB.listColumnFamilies(new Options(), configuration.getDatabaseDir().toString()); @@ -156,14 +153,9 @@ public RocksDBColumnarKeyValueStorage( .noneMatch(existed -> Arrays.equals(existed, ignorableSegment.getId()))) .forEach(trimmedSegments::remove); columnDescriptors = - trimmedSegments.stream().map(this::createColumnDescriptor).collect(Collectors.toList()); - columnDescriptors.add( - new ColumnFamilyDescriptor( - DEFAULT_COLUMN.getBytes(StandardCharsets.UTF_8), - columnFamilyOptions - .setTtl(0) - .setCompressionType(CompressionType.LZ4_COMPRESSION) - .setTableFormatConfig(createBlockBasedTableConfig(configuration)))); + trimmedSegments.stream() + .map(segment -> createColumnDescriptor(segment, configuration)) + .collect(Collectors.toList()); setGlobalOptions(configuration, stats); @@ -174,6 +166,80 @@ public RocksDBColumnarKeyValueStorage( } } + /** + * Create a Column Family Descriptor for a given segment. It defines the different + * options to apply to the corresponding Column Family + * + * @param segment the segment identifier + * @param configuration RocksDB configuration + * @return a column family descriptor + */ + private ColumnFamilyDescriptor createColumnDescriptor( + final SegmentIdentifier segment, final RocksDBConfiguration configuration) { + + BlockBasedTableConfig basedTableConfig = createBlockBasedTableConfig(segment, configuration); + + final var options = + new ColumnFamilyOptions() + .setTtl(0) + .setCompressionType(CompressionType.LZ4_COMPRESSION) + .setTableFormatConfig(basedTableConfig); + + if (segment.containsStaticData()) { + options + .setEnableBlobFiles(true) + .setEnableBlobGarbageCollection(false) + .setMinBlobSize(100) + .setBlobCompressionType(CompressionType.LZ4_COMPRESSION); + } + + return new ColumnFamilyDescriptor(segment.getId(), options); + } + + /** + * Create a Block Based Table configuration for each segment, depending on the configuration in
place + * and the segment itself + * + * @param segment The segment related to the column family + * @param config RocksDB configuration + * @return Block Based Table configuration + */ + private BlockBasedTableConfig createBlockBasedTableConfig( + final SegmentIdentifier segment, final RocksDBConfiguration config) { + final LRUCache cache = + new LRUCache( + config.isHighSpec() && segment.isEligibleToHighSpecFlag() + ? ROCKSDB_BLOCKCACHE_SIZE_HIGH_SPEC + : config.getCacheCapacity()); + return new BlockBasedTableConfig() + .setFormatVersion(ROCKSDB_FORMAT_VERSION) + .setBlockCache(cache) + .setFilterPolicy(new BloomFilter(10, false)) + .setPartitionFilters(true) + .setCacheIndexAndFilterBlocks(false) + .setBlockSize(ROCKSDB_BLOCK_SIZE); + } + + /** + * Set Global options (DBOptions) + * + * @param configuration RocksDB configuration + * @param stats The statistics object + */ + private void setGlobalOptions(final RocksDBConfiguration configuration, final Statistics stats) { + options = new DBOptions(); + options + .setCreateIfMissing(true) + .setMaxOpenFiles(configuration.getMaxOpenFiles()) + .setStatistics(stats) + .setCreateMissingColumnFamilies(true) + .setLogFileTimeToRoll(TIME_TO_ROLL_LOG_FILE) + .setKeepLogFileNum(NUMBER_OF_LOG_FILES_TO_KEEP) + .setEnv(Env.getDefault().setBackgroundThreads(configuration.getBackgroundThreadCount())) + .setMaxTotalWalSize(WAL_MAX_TOTAL_SIZE) + .setRecycleLogFileNum(WAL_MAX_TOTAL_SIZE / EXPECTED_WAL_FILE_SIZE); + } + /** * Parse RocksDBException and wrap in StorageException * @@ -219,42 +285,6 @@ protected static StorageException parseRocksDBException( } } - private ColumnFamilyDescriptor createColumnDescriptor(final SegmentIdentifier segment) { - final var options = - new ColumnFamilyOptions() - .setTtl(0) - .setCompressionType(CompressionType.LZ4_COMPRESSION) - .setTableFormatConfig(createBlockBasedTableConfig(configuration)); - - if (segment.containsStaticData()) { - options - .setEnableBlobFiles(true) -
.setEnableBlobGarbageCollection(false) - .setMinBlobSize(100) - .setBlobCompressionType(CompressionType.LZ4_COMPRESSION); - } - - return new ColumnFamilyDescriptor(segment.getId(), options); - } - - private void setGlobalOptions(final RocksDBConfiguration configuration, final Statistics stats) { - options = new DBOptions(); - options - .setCreateIfMissing(true) - .setMaxOpenFiles(configuration.getMaxOpenFiles()) - .setMaxTotalWalSize(WAL_MAX_TOTAL_SIZE) - .setRecycleLogFileNum(WAL_MAX_TOTAL_SIZE / EXPECTED_WAL_FILE_SIZE) - .setStatistics(stats) - .setCreateMissingColumnFamilies(true) - .setLogFileTimeToRoll(TIME_TO_ROLL_LOG_FILE) - .setKeepLogFileNum(NUMBER_OF_LOG_FILES_TO_KEEP) - .setEnv(Env.getDefault().setBackgroundThreads(configuration.getBackgroundThreadCount())); - - if (configuration.isHighSpec()) { - options.setDbWriteBufferSize(ROCKSDB_MEMTABLE_SIZE_HIGH_SPEC); - } - } - void initMetrics() { metrics = rocksDBMetricsFactory.create(metricsSystem, configuration, getDB(), stats); } @@ -287,19 +317,6 @@ void initColumnHandles() throws RocksDBException { })); } - BlockBasedTableConfig createBlockBasedTableConfig(final RocksDBConfiguration config) { - final LRUCache cache = - new LRUCache( - config.isHighSpec() ? ROCKSDB_BLOCKCACHE_SIZE_HIGH_SPEC : config.getCacheCapacity()); - return new BlockBasedTableConfig() - .setFormatVersion(ROCKSDB_FORMAT_VERSION) - .setBlockCache(cache) - .setFilterPolicy(new BloomFilter(10, false)) - .setPartitionFilters(true) - .setCacheIndexAndFilterBlocks(false) - .setBlockSize(ROCKSDB_BLOCK_SIZE); - } - /** * Safe method to map segment identifier to column handle. 
* diff --git a/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/RocksDBKeyValuePrivacyStorageFactoryTest.java b/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/RocksDBKeyValuePrivacyStorageFactoryTest.java index e0ed3f7b396..65d8ee7b58f 100644 --- a/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/RocksDBKeyValuePrivacyStorageFactoryTest.java +++ b/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/RocksDBKeyValuePrivacyStorageFactoryTest.java @@ -45,7 +45,7 @@ public class RocksDBKeyValuePrivacyStorageFactoryTest { @TempDir private Path temporaryFolder; private final ObservableMetricsSystem metricsSystem = new NoOpMetricsSystem(); private final SegmentIdentifier segment = TestSegment.BAR; - private final List segments = List.of(segment); + private final List segments = List.of(TestSegment.DEFAULT, segment); @Test public void shouldDetectVersion1DatabaseIfNoMetadataFileFound() throws Exception { diff --git a/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/RocksDBKeyValueStorageFactoryTest.java b/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/RocksDBKeyValueStorageFactoryTest.java index c3a8c32eb27..8f1e116b3a1 100644 --- a/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/RocksDBKeyValueStorageFactoryTest.java +++ b/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/RocksDBKeyValueStorageFactoryTest.java @@ -50,7 +50,7 @@ public class RocksDBKeyValueStorageFactoryTest { @TempDir public Path temporaryFolder; private final ObservableMetricsSystem metricsSystem = new NoOpMetricsSystem(); private final SegmentIdentifier segment = TestSegment.FOO; - private final List segments = List.of(segment); + private final List segments = List.of(TestSegment.DEFAULT, segment); @Test public void 
shouldCreateCorrectMetadataFileForLatestVersion() throws Exception { diff --git a/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/OptimisticTransactionDBRocksDBColumnarKeyValueStorageTest.java b/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/OptimisticTransactionDBRocksDBColumnarKeyValueStorageTest.java index 7e8cef52cc2..bf838f9c4fd 100644 --- a/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/OptimisticTransactionDBRocksDBColumnarKeyValueStorageTest.java +++ b/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/OptimisticTransactionDBRocksDBColumnarKeyValueStorageTest.java @@ -39,7 +39,7 @@ protected SegmentedKeyValueStorage createSegmentedStore() throws Exception { new RocksDBConfigurationBuilder() .databaseDir(Files.createTempDirectory("segmentedStore")) .build(), - Arrays.asList(TestSegment.FOO, TestSegment.BAR), + Arrays.asList(TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR), List.of(), new NoOpMetricsSystem(), RocksDBMetricsFactory.PUBLIC_ROCKS_DB_METRICS); diff --git a/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/RocksDBColumnarKeyValueStorageTest.java b/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/RocksDBColumnarKeyValueStorageTest.java index 5462d239b91..80b9685b792 100644 --- a/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/RocksDBColumnarKeyValueStorageTest.java +++ b/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/RocksDBColumnarKeyValueStorageTest.java @@ -194,13 +194,17 @@ public void dbShouldIgnoreExperimentalSegmentsIfNotExisted(@TempDir final Path t SegmentedKeyValueStorage store = createSegmentedStore( testPath, - Arrays.asList(TestSegment.FOO, TestSegment.BAR, 
TestSegment.EXPERIMENTAL), + Arrays.asList( + TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL), List.of(TestSegment.EXPERIMENTAL)); store.close(); // new db will be backward compatible with db without knowledge of experimental column family store = - createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of()); + createSegmentedStore( + testPath, + Arrays.asList(TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR), + List.of()); store.close(); } @@ -212,14 +216,18 @@ public void dbShouldNotIgnoreExperimentalSegmentsIfExisted(@TempDir final Path t SegmentedKeyValueStorage store = createSegmentedStore( testPath, - Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL), + Arrays.asList( + TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL), List.of()); store.close(); // new db will not be backward compatible with db without knowledge of experimental column // family try { - createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of()); + createSegmentedStore( + testPath, + Arrays.asList(TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR), + List.of()); fail("DB without knowledge of experimental column family should fail"); } catch (StorageException e) { assertThat(e.getMessage()).contains("Unhandled column families"); @@ -230,7 +238,8 @@ public void dbShouldNotIgnoreExperimentalSegmentsIfExisted(@TempDir final Path t store = createSegmentedStore( testPath, - Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL), + Arrays.asList( + TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL), List.of(TestSegment.EXPERIMENTAL)); store.close(); } @@ -242,27 +251,35 @@ public void dbWillBeBackwardIncompatibleAfterExperimentalSegmentsAreAdded( SegmentedKeyValueStorage store = createSegmentedStore( testPath, - Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL), + 
Arrays.asList( + TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL), List.of(TestSegment.EXPERIMENTAL)); store.close(); // new db will be backward compatible with db without knowledge of experimental column family store = - createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of()); + createSegmentedStore( + testPath, + Arrays.asList(TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR), + List.of()); store.close(); // Create new db without ignoring experimental colum family will add column to db store = createSegmentedStore( testPath, - Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL), + Arrays.asList( + TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL), List.of()); store.close(); // Now, the db will be backward incompatible with db without knowledge of experimental column // family try { - createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of()); + createSegmentedStore( + testPath, + Arrays.asList(TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR), + List.of()); fail("DB without knowledge of experimental column family should fail"); } catch (StorageException e) { assertThat(e.getMessage()).contains("Unhandled column families"); @@ -293,7 +310,10 @@ public void createStoreMustCreateMetrics() throws Exception { final SegmentedKeyValueStorage store = createSegmentedStore( - folder, metricsSystemMock, List.of(TestSegment.FOO), List.of(TestSegment.EXPERIMENTAL)); + folder, + metricsSystemMock, + List.of(TestSegment.DEFAULT, TestSegment.FOO), + List.of(TestSegment.EXPERIMENTAL)); KeyValueStorage keyValueStorage = new SegmentedKeyValueStorageAdapter(TestSegment.FOO, store); @@ -343,24 +363,28 @@ public void createStoreMustCreateMetrics() throws Exception { } public enum TestSegment implements SegmentIdentifier { + DEFAULT("default".getBytes(StandardCharsets.UTF_8)), FOO(new byte[] {1}), BAR(new byte[] {2}), 
EXPERIMENTAL(new byte[] {3}), - STATIC_DATA(new byte[] {4}, true); + STATIC_DATA(new byte[] {4}, true, false); private final byte[] id; private final String nameAsUtf8; private final boolean containsStaticData; + private final boolean eligibleToHighSpecFlag; TestSegment(final byte[] id) { - this(id, false); + this(id, false, false); } - TestSegment(final byte[] id, final boolean containsStaticData) { + TestSegment( + final byte[] id, final boolean containsStaticData, final boolean eligibleToHighSpecFlag) { this.id = id; this.nameAsUtf8 = new String(id, StandardCharsets.UTF_8); this.containsStaticData = containsStaticData; + this.eligibleToHighSpecFlag = eligibleToHighSpecFlag; } @Override @@ -377,6 +401,11 @@ public byte[] getId() { public boolean containsStaticData() { return containsStaticData; } + + @Override + public boolean isEligibleToHighSpecFlag() { + return eligibleToHighSpecFlag; + } } protected abstract SegmentedKeyValueStorage createSegmentedStore() throws Exception; diff --git a/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/TransactionDBRocksDBColumnarKeyValueStorageTest.java b/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/TransactionDBRocksDBColumnarKeyValueStorageTest.java index bed64f58404..374f5534567 100644 --- a/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/TransactionDBRocksDBColumnarKeyValueStorageTest.java +++ b/plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/TransactionDBRocksDBColumnarKeyValueStorageTest.java @@ -36,7 +36,7 @@ public class TransactionDBRocksDBColumnarKeyValueStorageTest protected SegmentedKeyValueStorage createSegmentedStore() throws Exception { return new TransactionDBRocksDBColumnarKeyValueStorage( new RocksDBConfigurationBuilder().databaseDir(getTempSubFolder(folder)).build(), - Arrays.asList(TestSegment.FOO, TestSegment.BAR), + 
Arrays.asList(TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR), List.of(), new NoOpMetricsSystem(), RocksDBMetricsFactory.PUBLIC_ROCKS_DB_METRICS); diff --git a/services/kvstore/src/main/java/org/hyperledger/besu/services/kvstore/InMemoryKeyValueStorage.java b/services/kvstore/src/main/java/org/hyperledger/besu/services/kvstore/InMemoryKeyValueStorage.java index dbd62eaff9a..e3d82ca9dc4 100644 --- a/services/kvstore/src/main/java/org/hyperledger/besu/services/kvstore/InMemoryKeyValueStorage.java +++ b/services/kvstore/src/main/java/org/hyperledger/besu/services/kvstore/InMemoryKeyValueStorage.java @@ -50,6 +50,11 @@ public byte[] getId() { public boolean containsStaticData() { return false; } + + @Override + public boolean isEligibleToHighSpecFlag() { + return false; + } }; private static ConcurrentMap>> asSegmentMap(