/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17 package org.apache.hadoop.hbase.io.hfile;
18
19 import static org.junit.Assert.assertEquals;
20 import static org.junit.Assert.assertTrue;
21
22 import java.io.IOException;
23 import java.util.Arrays;
24 import java.util.Collection;
25
26 import org.apache.hadoop.hbase.HBaseTestingUtility;
27 import org.apache.hadoop.hbase.HColumnDescriptor;
28 import org.apache.hadoop.hbase.testclassification.MediumTests;
29 import org.apache.hadoop.hbase.client.Get;
30 import org.apache.hadoop.hbase.client.Put;
31 import org.apache.hadoop.hbase.io.compress.Compression;
32 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
33 import org.apache.hadoop.hbase.regionserver.BloomType;
34 import org.apache.hadoop.hbase.regionserver.Region;
35 import org.apache.hadoop.hbase.util.Bytes;
36 import org.junit.Before;
37 import org.junit.Test;
38 import org.junit.experimental.categories.Category;
39 import org.junit.runner.RunWith;
40 import org.junit.runners.Parameterized;
41 import org.junit.runners.Parameterized.Parameters;
42
43
44
45
46
47
48
49
50
51
52 @Category(MediumTests.class)
53 @RunWith(Parameterized.class)
54 public class TestForceCacheImportantBlocks {
55 private final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
56
57 private static final String TABLE = "myTable";
58 private static final String CF = "myCF";
59 private static final byte[] CF_BYTES = Bytes.toBytes(CF);
60 private static final int MAX_VERSIONS = 3;
61 private static final int NUM_HFILES = 5;
62
63 private static final int ROWS_PER_HFILE = 100;
64 private static final int NUM_ROWS = NUM_HFILES * ROWS_PER_HFILE;
65 private static final int NUM_COLS_PER_ROW = 50;
66 private static final int NUM_TIMESTAMPS_PER_COL = 50;
67
68
69 private static final int BLOCK_SIZE = 256;
70
71 private static final Algorithm COMPRESSION_ALGORITHM =
72 Compression.Algorithm.GZ;
73 private static final BloomType BLOOM_TYPE = BloomType.ROW;
74
75 @SuppressWarnings("unused")
76
77 private final int hfileVersion;
78 private final boolean cfCacheEnabled;
79
80 @Parameters
81 public static Collection<Object[]> parameters() {
82
83 return Arrays.asList(
84 new Object[] { 2, true },
85 new Object[] { 2, false },
86 new Object[] { 3, true },
87 new Object[] { 3, false }
88 );
89 }
90
91 public TestForceCacheImportantBlocks(int hfileVersion, boolean cfCacheEnabled) {
92 this.hfileVersion = hfileVersion;
93 this.cfCacheEnabled = cfCacheEnabled;
94 TEST_UTIL.getConfiguration().setInt(HFile.FORMAT_VERSION_KEY, hfileVersion);
95 }
96
97 @Before
98 public void setup() {
99
100 CacheConfig.GLOBAL_BLOCK_CACHE_INSTANCE = null;
101 HFile.dataBlockReadCnt.set(0);
102 }
103
104 @Test
105 public void testCacheBlocks() throws IOException {
106
107 TEST_UTIL.getConfiguration().setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, BLOCK_SIZE);
108 HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes(CF)).setMaxVersions(MAX_VERSIONS).
109 setCompressionType(COMPRESSION_ALGORITHM).
110 setBloomFilterType(BLOOM_TYPE);
111 hcd.setBlocksize(BLOCK_SIZE);
112 hcd.setBlockCacheEnabled(cfCacheEnabled);
113 Region region = TEST_UTIL.createTestRegion(TABLE, hcd);
114 BlockCache cache = region.getStore(hcd.getName()).getCacheConfig().getBlockCache();
115 CacheStats stats = cache.getStats();
116 writeTestData(region);
117 assertEquals(0, stats.getHitCount());
118 assertEquals(0, HFile.dataBlockReadCnt.get());
119
120
121 region.get(new Get(Bytes.toBytes("row" + 0)));
122 assertTrue(stats.getHitCount() > 0);
123 assertTrue(HFile.dataBlockReadCnt.get() > 0);
124 long missCount = stats.getMissCount();
125 region.get(new Get(Bytes.toBytes("row" + 0)));
126 if (this.cfCacheEnabled) assertEquals(missCount, stats.getMissCount());
127 else assertTrue(stats.getMissCount() > missCount);
128 }
129
130 private void writeTestData(Region region) throws IOException {
131 for (int i = 0; i < NUM_ROWS; ++i) {
132 Put put = new Put(Bytes.toBytes("row" + i));
133 for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
134 for (long ts = 1; ts < NUM_TIMESTAMPS_PER_COL; ++ts) {
135 put.add(CF_BYTES, Bytes.toBytes("col" + j), ts,
136 Bytes.toBytes("value" + i + "_" + j + "_" + ts));
137 }
138 }
139 region.put(put);
140 if ((i + 1) % ROWS_PER_HFILE == 0) {
141 region.flush(true);
142 }
143 }
144 }
145 }