19 | 19 |
20 | 20 | import static org.apache.hadoop.hbase.client.trace.hamcrest.SpanDataMatchers.hasName;
21 | 21 | import static org.apache.hadoop.hbase.client.trace.hamcrest.SpanDataMatchers.hasParentSpanId;
| 22 | +import static org.apache.hadoop.hbase.io.hfile.CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY;
22 | 23 | import static org.hamcrest.MatcherAssert.assertThat;
23 | 24 | import static org.hamcrest.Matchers.allOf;
24 | 25 | import static org.hamcrest.Matchers.hasItem;
25 | 26 | import static org.hamcrest.Matchers.hasItems;
26 | 27 | import static org.hamcrest.Matchers.not;
27 | 28 | import static org.junit.Assert.assertFalse;
28 | 29 | import static org.junit.Assert.assertTrue;
| 30 | +import static org.junit.Assert.fail;
29 | 31 |
30 | 32 | import io.opentelemetry.sdk.testing.junit4.OpenTelemetryRule;
31 | 33 | import io.opentelemetry.sdk.trace.data.SpanData;
34 | 36 | import java.util.Random;
35 | 37 | import java.util.concurrent.ThreadLocalRandom;
36 | 38 | import java.util.concurrent.TimeUnit;
| 39 | +import java.util.function.BiConsumer;
| 40 | +import java.util.function.BiFunction;
37 | 41 | import org.apache.hadoop.conf.Configuration;
38 | 42 | import org.apache.hadoop.fs.FileSystem;
39 | 43 | import org.apache.hadoop.fs.Path;
47 | 51 | import org.apache.hadoop.hbase.client.trace.StringTraceRenderer;
48 | 52 | import org.apache.hadoop.hbase.fs.HFileSystem;
49 | 53 | import org.apache.hadoop.hbase.io.ByteBuffAllocator;
| 54 | +import org.apache.hadoop.hbase.io.compress.Compression;
50 | 55 | import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
51 | 56 | import org.apache.hadoop.hbase.testclassification.IOTests;
52 | 57 | import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -148,36 +153,88 @@ private void readStoreFileLikeScanner(Path storeFilePath) throws Exception {
148 | 153 |   }
149 | 154 |
150 | 155 |   private void readStoreFile(Path storeFilePath) throws Exception {
| 156 | +    readStoreFile(storeFilePath, (r, o) -> {
| 157 | +      HFileBlock block = null;
| 158 | +      try {
| 159 | +        block = r.readBlock(o, -1, false, true, false, true, null, null);
| 160 | +      } catch (IOException e) {
| 161 | +        fail(e.getMessage());
| 162 | +      }
| 163 | +      return block;
| 164 | +    }, (key, block) -> {
| 165 | +      boolean isCached = blockCache.getBlock(key, true, false, true) != null;
| 166 | +      if (
| 167 | +        block.getBlockType() == BlockType.DATA || block.getBlockType() == BlockType.ROOT_INDEX
| 168 | +          || block.getBlockType() == BlockType.INTERMEDIATE_INDEX
| 169 | +      ) {
| 170 | +        assertTrue(isCached);
| 171 | +      }
| 172 | +    });
| 173 | +  }
| 174 | +
| 175 | +  private void readStoreFileCacheOnly(Path storeFilePath) throws Exception {
| 176 | +    readStoreFile(storeFilePath, (r, o) -> {
| 177 | +      HFileBlock block = null;
| 178 | +      try {
| 179 | +        block = r.readBlock(o, -1, false, true, false, true, null, null, true);
| 180 | +      } catch (IOException e) {
| 181 | +        fail(e.getMessage());
| 182 | +      }
| 183 | +      return block;
| 184 | +    }, (key, block) -> {
| 185 | +      boolean isCached = blockCache.getBlock(key, true, false, true) != null;
| 186 | +      if (block.getBlockType() == BlockType.DATA) {
| 187 | +        assertFalse(block.isUnpacked());
| 188 | +      } else if (
| 189 | +        block.getBlockType() == BlockType.ROOT_INDEX
| 190 | +          || block.getBlockType() == BlockType.INTERMEDIATE_INDEX
| 191 | +      ) {
| 192 | +        assertTrue(block.isUnpacked());
| 193 | +      }
| 194 | +      assertTrue(isCached);
| 195 | +    });
| 196 | +  }
| 197 | +
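A note on the fail(e.getMessage()) calls in the two helpers above: the read lambdas are handed to the shared readStoreFile overload as a BiFunction, whose apply method declares no checked exceptions, so an IOException thrown by readBlock must be handled inside the lambda and is converted into a test failure. (The extra trailing boolean in the second helper's readBlock call appears, from context, to be a cache-only switch introduced by this change.) A minimal, self-contained sketch of the same checked-exception-in-lambda situation, with all names illustrative rather than taken from HBase:

    import java.io.IOException;
    import java.util.function.BiFunction;

    // Why the lambdas call fail(...) instead of rethrowing: BiFunction.apply
    // declares no checked exceptions, so an IOException thrown inside the
    // lambda body must be handled in place.
    public class CheckedInLambdaSketch {
      static String read(String src, long off) throws IOException {
        if (off < 0) throw new IOException("bad offset");
        return src + "@" + off;
      }

      public static void main(String[] args) {
        BiFunction<String, Long, String> readFn = (s, o) -> {
          try {
            return read(s, o);
          } catch (IOException e) {
            throw new AssertionError(e); // a JUnit test would call fail(e.getMessage())
          }
        };
        System.out.println(readFn.apply("storefile", 0L));
      }
    }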
| 198 | +  private void readStoreFile(Path storeFilePath,
| 199 | +    BiFunction<HFile.Reader, Long, HFileBlock> readFunction,
| 200 | +    BiConsumer<BlockCacheKey, HFileBlock> validationFunction) throws Exception {
151 | 201 |     // Open the file
152 | 202 |     HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);
153 | 203 |
154 | 204 |     while (!reader.prefetchComplete()) {
155 | 205 |       // Sleep for a bit
156 | 206 |       Thread.sleep(1000);
157 | 207 |     }
158 | | -
159 | | -    // Check that all of the data blocks were preloaded
160 | | -    BlockCache blockCache = cacheConf.getBlockCache().get();
161 | 208 |     long offset = 0;
162 | 209 |     while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
163 | | -      HFileBlock block = reader.readBlock(offset, -1, false, true, false, true, null, null);
| 210 | +      HFileBlock block = readFunction.apply(reader, offset);
164 | 211 |       BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(), offset);
165 | | -      boolean isCached = blockCache.getBlock(blockCacheKey, true, false, true) != null;
166 | | -      if (
167 | | -        block.getBlockType() == BlockType.DATA || block.getBlockType() == BlockType.ROOT_INDEX
168 | | -          || block.getBlockType() == BlockType.INTERMEDIATE_INDEX
169 | | -      ) {
170 | | -        assertTrue(isCached);
171 | | -      }
| 212 | +      validationFunction.accept(blockCacheKey, block);
172 | 213 |       offset += block.getOnDiskSizeWithHeader();
173 | 214 |     }
174 | 215 |   }
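The parameterized readStoreFile above keeps a single scan loop over every block before the load-on-open section and injects the variable behavior: a BiFunction fetches the block at an offset, a BiConsumer asserts on the cached result. A minimal, self-contained sketch of this injection pattern using plain JDK types (every name here is illustrative, standing in for HFile.Reader, HFileBlock and BlockCacheKey):

    import java.util.function.BiConsumer;
    import java.util.function.BiFunction;

    public class ReadValidateSketch {
      // Stand-in "reader": maps an offset within a file to the block found there.
      static String readAt(String file, long offset) {
        return file + "@" + offset;
      }

      // One shared scan loop; fetch and validation behavior are injected.
      static void scan(String file, BiFunction<String, Long, String> readFunction,
        BiConsumer<Long, String> validationFunction) {
        for (long offset = 0; offset < 3; offset++) {
          String block = readFunction.apply(file, offset);
          validationFunction.accept(offset, block); // caller-supplied assertions
        }
      }

      public static void main(String[] args) {
        scan("storefile", ReadValidateSketch::readAt,
          (off, blk) -> System.out.println("validated " + blk));
      }
    }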
175 | 216 |
| 217 | +  @Test
| 218 | +  public void testPrefetchCompressed() throws Exception {
| 219 | +    conf.setBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, true);
| 220 | +    cacheConf = new CacheConfig(conf, blockCache);
| 221 | +    HFileContext context = new HFileContextBuilder().withCompression(Compression.Algorithm.GZ)
| 222 | +      .withBlockSize(DATA_BLOCK_SIZE).build();
| 223 | +    Path storeFile = writeStoreFile("TestPrefetchCompressed", context);
| 224 | +    readStoreFileCacheOnly(storeFile);
| 225 | +    conf.setBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, false);
| 226 | +
| 227 | +  }
| 228 | +
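The new test enables caching of data blocks in their on-disk (compressed) form, writes a GZ-compressed store file, and then uses readStoreFileCacheOnly to verify that prefetched DATA blocks land in the cache still packed while index blocks are unpacked. A standalone sketch of the configuration being exercised; the property string is what CACHE_DATA_BLOCKS_COMPRESSED_KEY is believed to resolve to in CacheConfig, and the 64 KB block size is an illustrative stand-in for DATA_BLOCK_SIZE:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.io.compress.Compression;
    import org.apache.hadoop.hbase.io.hfile.HFileContext;
    import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

    public class CompressedCacheSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Cache data blocks without unpacking them (assumed property name).
        conf.setBoolean("hbase.block.data.cachecompressed", true);
        // File context mirroring the one built in testPrefetchCompressed.
        HFileContext context = new HFileContextBuilder()
          .withCompression(Compression.Algorithm.GZ).withBlockSize(64 * 1024).build();
        System.out.println(context.getCompression() + " blocks, cached compressed: "
          + conf.getBoolean("hbase.block.data.cachecompressed", false));
      }
    }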
176 | 229 |   private Path writeStoreFile(String fname) throws IOException {
177 | | -    Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), fname);
178 | 230 |     HFileContext meta = new HFileContextBuilder().withBlockSize(DATA_BLOCK_SIZE).build();
| 231 | +    return writeStoreFile(fname, meta);
| 232 | +  }
| 233 | +
| 234 | +  private Path writeStoreFile(String fname, HFileContext context) throws IOException {
| 235 | +    Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), fname);
179 | 236 |     StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConf, fs)
180 | | -      .withOutputDir(storeFileParentDir).withFileContext(meta).build();
| 237 | +      .withOutputDir(storeFileParentDir).withFileContext(context).build();
181 | 238 |     Random rand = ThreadLocalRandom.current();
182 | 239 |     final int rowLen = 32;
183 | 240 |     for (int i = 0; i < NUM_KV; ++i) {