1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19 package org.apache.hadoop.hbase.io.hfile;
20
21 import java.lang.ref.WeakReference;
22 import java.nio.ByteBuffer;
23 import java.util.EnumMap;
24 import java.util.Iterator;
25 import java.util.List;
26 import java.util.Map;
27 import java.util.PriorityQueue;
28 import java.util.SortedSet;
29 import java.util.TreeSet;
30 import java.util.concurrent.ConcurrentHashMap;
31 import java.util.concurrent.Executors;
32 import java.util.concurrent.ScheduledExecutorService;
33 import java.util.concurrent.TimeUnit;
34 import java.util.concurrent.atomic.AtomicLong;
35 import java.util.concurrent.locks.ReentrantLock;
36
37 import com.google.common.base.Objects;
38 import org.apache.commons.logging.Log;
39 import org.apache.commons.logging.LogFactory;
40 import org.apache.hadoop.hbase.classification.InterfaceAudience;
41 import org.apache.hadoop.conf.Configuration;
42 import org.apache.hadoop.hbase.io.HeapSize;
43 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
44 import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
45 import org.apache.hadoop.hbase.util.Bytes;
46 import org.apache.hadoop.hbase.util.ClassSize;
47 import org.apache.hadoop.hbase.util.HasThread;
48 import org.apache.hadoop.util.StringUtils;
49 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
50
51 import com.google.common.annotations.VisibleForTesting;
52 import com.google.common.util.concurrent.ThreadFactoryBuilder;
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98 @InterfaceAudience.Private
99 @JsonIgnoreProperties({"encodingCountsForTest"})
100 public class LruBlockCache implements ResizableBlockCache, HeapSize {
101
  private static final Log LOG = LogFactory.getLog(LruBlockCache.class);

  /** Configuration key: usage fraction an eviction run frees down to. */
  static final String LRU_MIN_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.min.factor";

  /** Configuration key: usage fraction above which an eviction run is triggered. */
  static final String LRU_ACCEPTABLE_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.acceptable.factor";

  /** Configuration keys: relative shares of the single/multi/in-memory priority buckets. */
  static final String LRU_SINGLE_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.single.percentage";
  static final String LRU_MULTI_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.multi.percentage";
  static final String LRU_MEMORY_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.memory.percentage";

  /**
   * Configuration key: if true, MEMORY-priority (in-memory) blocks are favored and may
   * displace blocks from the other buckets during eviction.
   */
  static final String LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME = "hbase.lru.rs.inmemoryforcemode";

  /** Defaults for the backing ConcurrentHashMap. */
  static final float DEFAULT_LOAD_FACTOR = 0.75f;
  static final int DEFAULT_CONCURRENCY_LEVEL = 16;

  /** Default eviction thresholds, as fractions of maxSize. */
  static final float DEFAULT_MIN_FACTOR = 0.95f;
  static final float DEFAULT_ACCEPTABLE_FACTOR = 0.99f;

  /** Default priority-bucket split; the three factors must sum to 1.0. */
  static final float DEFAULT_SINGLE_FACTOR = 0.25f;
  static final float DEFAULT_MULTI_FACTOR = 0.50f;
  static final float DEFAULT_MEMORY_FACTOR = 0.25f;

  static final boolean DEFAULT_IN_MEMORY_FORCE_MODE = false;

  /** Statistics-logging period, in seconds (5 minutes). */
  static final int statThreadPeriod = 60 * 5;

  /** Configuration key and default for the largest single block the cache will accept. */
  private static final String LRU_MAX_BLOCK_SIZE = "hbase.lru.max.block.size";
  private static final long DEFAULT_MAX_BLOCK_SIZE = 16L * 1024L * 1024L;

  /** Backing store: cache key -> cached block. */
  private final Map<BlockCacheKey,LruCachedBlock> map;

  /** Serializes eviction runs; fair so waiters proceed in arrival order. */
  private final ReentrantLock evictionLock = new ReentrantLock(true);
  private final long maxBlockSize;

  /** True while an eviction run is executing (volatile: read without the lock). */
  private volatile boolean evictionInProgress = false;

  /** Background eviction thread, or null when evictions run inline. */
  private final EvictionThread evictionThread;

  /** Daemon pool that periodically logs cache statistics. */
  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,
    new ThreadFactoryBuilder().setNameFormat("LruBlockCacheStatsExecutor").setDaemon(true).build());

  /** Current bytes used by the cache, including the fixed overhead. */
  private final AtomicLong size;

  /** Number of blocks currently in the cache. */
  private final AtomicLong elements;

  /** Monotonic access counter used to order blocks for LRU. */
  private final AtomicLong count;

  /** Hit/miss/eviction statistics. */
  private final CacheStats stats;

  /** Maximum allowed byte size of the cache (resizable via setMaxSize). */
  private long maxSize;

  /** Approximate block size, used to size the backing map. */
  private long blockSize;

  /** Eviction trigger threshold, as a fraction of maxSize. */
  private float acceptableFactor;

  /** Eviction stop threshold, as a fraction of maxSize. */
  private float minFactor;

  /** Share of the cache reserved for single-access blocks. */
  private float singleFactor;

  /** Share of the cache reserved for multi-access blocks. */
  private float multiFactor;

  /** Share of the cache reserved for in-memory (MEMORY priority) blocks. */
  private float memoryFactor;

  /** Fixed heap overhead of this cache instance, computed at construction. */
  private long overhead;

  /** If true, in-memory blocks are protected and single/multi buckets are drained first. */
  private boolean forceInMemory;

  /** Optional victim (L2) cache, e.g. a BucketCache, that receives evicted blocks. */
  private BlockCache victimHandler = null;
215
  /**
   * Constructs a cache with the given byte capacity and approximate block size,
   * running evictions on a background thread.
   *
   * @param maxSize maximum cache size, in bytes
   * @param blockSize approximate size of a single block, in bytes
   */
  public LruBlockCache(long maxSize, long blockSize) {
    this(maxSize, blockSize, true);
  }
219
220
221
222
223 public LruBlockCache(long maxSize, long blockSize, boolean evictionThread) {
224 this(maxSize, blockSize, evictionThread,
225 (int)Math.ceil(1.2*maxSize/blockSize),
226 DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL,
227 DEFAULT_MIN_FACTOR, DEFAULT_ACCEPTABLE_FACTOR,
228 DEFAULT_SINGLE_FACTOR,
229 DEFAULT_MULTI_FACTOR,
230 DEFAULT_MEMORY_FACTOR,
231 false,
232 DEFAULT_MAX_BLOCK_SIZE
233 );
234 }
235
  /**
   * Constructs a cache whose tuning factors are read from the given
   * Configuration, falling back to the class defaults when unset.
   *
   * @param maxSize maximum cache size, in bytes
   * @param blockSize approximate size of a single block, in bytes
   * @param evictionThread whether to run evictions on a dedicated background thread
   * @param conf source of the min/acceptable/single/multi/memory factors,
   *          the in-memory force mode flag, and the max block size
   */
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, Configuration conf) {
    this(maxSize, blockSize, evictionThread,
        (int)Math.ceil(1.2*maxSize/blockSize),
        DEFAULT_LOAD_FACTOR,
        DEFAULT_CONCURRENCY_LEVEL,
        conf.getFloat(LRU_MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR),
        conf.getFloat(LRU_ACCEPTABLE_FACTOR_CONFIG_NAME, DEFAULT_ACCEPTABLE_FACTOR),
        conf.getFloat(LRU_SINGLE_PERCENTAGE_CONFIG_NAME, DEFAULT_SINGLE_FACTOR),
        conf.getFloat(LRU_MULTI_PERCENTAGE_CONFIG_NAME, DEFAULT_MULTI_FACTOR),
        conf.getFloat(LRU_MEMORY_PERCENTAGE_CONFIG_NAME, DEFAULT_MEMORY_FACTOR),
        conf.getBoolean(LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME, DEFAULT_IN_MEMORY_FORCE_MODE),
        conf.getLong(LRU_MAX_BLOCK_SIZE, DEFAULT_MAX_BLOCK_SIZE)
    );
  }
250
  /**
   * Constructs a configuration-driven cache with a background eviction thread.
   *
   * @param maxSize maximum cache size, in bytes
   * @param blockSize approximate size of a single block, in bytes
   * @param conf source of the tuning factors
   */
  public LruBlockCache(long maxSize, long blockSize, Configuration conf) {
    this(maxSize, blockSize, true, conf);
  }
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
  /**
   * Configurable constructor.  Use this constructor if not using defaults.
   *
   * @param maxSize maximum size of this cache, in bytes
   * @param blockSize expected average size of blocks, in bytes
   * @param evictionThread whether to run evictions on a background thread
   * @param mapInitialSize initial size of the backing ConcurrentHashMap
   * @param mapLoadFactor load factor of the backing ConcurrentHashMap
   * @param mapConcurrencyLevel concurrency level of the backing ConcurrentHashMap
   * @param minFactor fraction of maxSize an eviction run frees down to
   * @param acceptableFactor fraction of maxSize at which eviction triggers
   * @param singleFactor share of the cache for single-access blocks
   * @param multiFactor share of the cache for multi-access blocks
   * @param memoryFactor share of the cache for in-memory blocks
   * @param forceInMemory if true, in-memory blocks are protected during eviction
   * @param maxBlockSize largest acceptable single-block heap size, in bytes
   * @throws IllegalArgumentException if the factors are inconsistent
   */
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread,
      int mapInitialSize, float mapLoadFactor, int mapConcurrencyLevel,
      float minFactor, float acceptableFactor, float singleFactor,
      float multiFactor, float memoryFactor, boolean forceInMemory, long maxBlockSize) {
    this.maxBlockSize = maxBlockSize;
    // NOTE(review): exact float equality — factors produced by arithmetic may sum
    // to e.g. 0.99999994f and be rejected; confirm callers pass exact literals.
    if(singleFactor + multiFactor + memoryFactor != 1 ||
        singleFactor < 0 || multiFactor < 0 || memoryFactor < 0) {
      throw new IllegalArgumentException("Single, multi, and memory factors " +
          " should be non-negative and total 1.0");
    }
    if(minFactor >= acceptableFactor) {
      throw new IllegalArgumentException("minFactor must be smaller than acceptableFactor");
    }
    if(minFactor >= 1.0f || acceptableFactor >= 1.0f) {
      throw new IllegalArgumentException("all factors must be < 1");
    }
    this.maxSize = maxSize;
    this.blockSize = blockSize;
    this.forceInMemory = forceInMemory;
    map = new ConcurrentHashMap<BlockCacheKey,LruCachedBlock>(mapInitialSize,
        mapLoadFactor, mapConcurrencyLevel);
    this.minFactor = minFactor;
    this.acceptableFactor = acceptableFactor;
    this.singleFactor = singleFactor;
    this.multiFactor = multiFactor;
    this.memoryFactor = memoryFactor;
    this.stats = new CacheStats(this.getClass().getSimpleName());
    this.count = new AtomicLong(0);
    this.elements = new AtomicLong(0);
    this.overhead = calculateOverhead(maxSize, blockSize, mapConcurrencyLevel);
    // The size gauge starts at the fixed overhead, not at zero.
    this.size = new AtomicLong(this.overhead);
    if(evictionThread) {
      this.evictionThread = new EvictionThread(this);
      this.evictionThread.start();
    } else {
      this.evictionThread = null;
    }
    // Schedule the periodic statistics logger (every statThreadPeriod seconds).
    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),
        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);
  }
311
312 @Override
313 public void setMaxSize(long maxSize) {
314 this.maxSize = maxSize;
315 if(this.size.get() > acceptableSize() && !evictionInProgress) {
316 runEviction();
317 }
318 }
319
320
321
322
323
324
325
326
327
328
329
330
331
332 @Override
333 public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory,
334 final boolean cacheDataInL1) {
335
336 if (buf.heapSize() > maxBlockSize) {
337
338
339
340 if (stats.failInsert() % 50 == 0) {
341 LOG.warn("Trying to cache too large a block "
342 + cacheKey.getHfileName() + " @ "
343 + cacheKey.getOffset()
344 + " is " + buf.heapSize()
345 + " which is larger than " + maxBlockSize);
346 }
347 return;
348 }
349
350 LruCachedBlock cb = map.get(cacheKey);
351 if (cb != null) {
352
353 if (compare(buf, cb.getBuffer()) != 0) {
354 throw new RuntimeException("Cached block contents differ, which should not have happened."
355 + "cacheKey:" + cacheKey);
356 }
357 String msg = "Cached an already cached block: " + cacheKey + " cb:" + cb.getCacheKey();
358 msg += ". This is harmless and can happen in rare cases (see HBASE-8547)";
359 LOG.warn(msg);
360 return;
361 }
362 cb = new LruCachedBlock(cacheKey, buf, count.incrementAndGet(), inMemory);
363 long newSize = updateSizeMetrics(cb, false);
364 map.put(cacheKey, cb);
365 long val = elements.incrementAndGet();
366 if (LOG.isTraceEnabled()) {
367 long size = map.size();
368 assertCounterSanity(size, val);
369 }
370 if (newSize > acceptableSize() && !evictionInProgress) {
371 runEviction();
372 }
373 }
374
375
376
377
378
379 private static void assertCounterSanity(long mapSize, long counterVal) {
380 if (counterVal < 0) {
381 LOG.trace("counterVal overflow. Assertions unreliable. counterVal=" + counterVal +
382 ", mapSize=" + mapSize);
383 return;
384 }
385 if (mapSize < Integer.MAX_VALUE) {
386 double pct_diff = Math.abs((((double) counterVal) / ((double) mapSize)) - 1.);
387 if (pct_diff > 0.05) {
388 LOG.trace("delta between reported and actual size > 5%. counterVal=" + counterVal +
389 ", mapSize=" + mapSize);
390 }
391 }
392 }
393
394 private int compare(Cacheable left, Cacheable right) {
395 ByteBuffer l = ByteBuffer.allocate(left.getSerializedLength());
396 left.serialize(l);
397 ByteBuffer r = ByteBuffer.allocate(right.getSerializedLength());
398 right.serialize(r);
399 return Bytes.compareTo(l.array(), l.arrayOffset(), l.limit(),
400 r.array(), r.arrayOffset(), r.limit());
401 }
402
403
404
405
406
407
408
409 public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {
410 cacheBlock(cacheKey, buf, false, false);
411 }
412
413
414
415
416
417
418
419
420
421 protected long updateSizeMetrics(LruCachedBlock cb, boolean evict) {
422 long heapsize = cb.heapSize();
423 if (evict) {
424 heapsize *= -1;
425 }
426 return size.addAndGet(heapsize);
427 }
428
429
430
431
432
433
434
435
436
437
438 @Override
439 public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat,
440 boolean updateCacheMetrics) {
441 LruCachedBlock cb = map.get(cacheKey);
442 if (cb == null) {
443 if (!repeat && updateCacheMetrics) stats.miss(caching, cacheKey.isPrimary());
444
445
446
447 if (victimHandler != null && !repeat) {
448 Cacheable result = victimHandler.getBlock(cacheKey, caching, repeat, updateCacheMetrics);
449
450
451 if (result != null && caching) {
452 cacheBlock(cacheKey, result,
453 }
454 return result;
455 }
456 return null;
457 }
458 if (updateCacheMetrics) stats.hit(caching, cacheKey.isPrimary());
459 cb.access(count.incrementAndGet());
460 return cb.getBuffer();
461 }
462
463
464
465
466
467
468 public boolean containsBlock(BlockCacheKey cacheKey) {
469 return map.containsKey(cacheKey);
470 }
471
472 @Override
473 public boolean evictBlock(BlockCacheKey cacheKey) {
474 LruCachedBlock cb = map.get(cacheKey);
475 if (cb == null) return false;
476 evictBlock(cb, false);
477 return true;
478 }
479
480
481
482
483
484
485
486
487
488
489
490 @Override
491 public int evictBlocksByHfileName(String hfileName) {
492 int numEvicted = 0;
493 for (BlockCacheKey key : map.keySet()) {
494 if (key.getHfileName().equals(hfileName)) {
495 if (evictBlock(key))
496 ++numEvicted;
497 }
498 }
499 if (victimHandler != null) {
500 numEvicted += victimHandler.evictBlocksByHfileName(hfileName);
501 }
502 return numEvicted;
503 }
504
505
506
507
508
509
510
511
512
  /**
   * Evicts the given block from this cache, updating counters and stats; when
   * evicted by the eviction process and a victim cache is registered, hands the
   * block off to the victim cache.
   *
   * @param block the block to evict
   * @param evictedByEvictionProcess true if called from the eviction process
   *          (enables the victim-cache hand-off)
   * @return the heap size of the evicted block
   */
  protected long evictBlock(LruCachedBlock block, boolean evictedByEvictionProcess) {
    map.remove(block.getCacheKey());
    updateSizeMetrics(block, true);
    long val = elements.decrementAndGet();
    if (LOG.isTraceEnabled()) {
      long size = map.size();
      assertCounterSanity(size, val);
    }
    stats.evicted(block.getCachedTime(), block.getCacheKey().isPrimary());
    if (evictedByEvictionProcess && victimHandler != null) {
      if (victimHandler instanceof BucketCache) {
        // Only block on the BucketCache insert while we are still below the
        // acceptable watermark; otherwise hand off without waiting.
        boolean wait = getCurrentSize() < acceptableSize();
        boolean inMemory = block.getPriority() == BlockPriority.MEMORY;
        ((BucketCache)victimHandler).cacheBlockWithWait(block.getCacheKey(), block.getBuffer(),
            inMemory, wait);
      } else {
        victimHandler.cacheBlock(block.getCacheKey(), block.getBuffer());
      }
    }
    return block.heapSize();
  }
534
535
536
537
538 private void runEviction() {
539 if(evictionThread == null) {
540 evict();
541 } else {
542 evictionThread.evict();
543 }
544 }
545
546
547
548
  /**
   * Eviction method.  Frees blocks until the cache is back at the minimum-size
   * watermark, drawing from the single/multi/memory priority buckets according
   * to how far each exceeds its configured share.
   */
  void evict() {

    // Only one eviction at a time; give up rather than queue behind another run.
    if(!evictionLock.tryLock()) return;

    try {
      evictionInProgress = true;
      long currentSize = this.size.get();
      long bytesToFree = currentSize - minSize();

      if (LOG.isTraceEnabled()) {
        LOG.trace("Block cache LRU eviction started; Attempting to free " +
          StringUtils.byteDesc(bytesToFree) + " of total=" +
          StringUtils.byteDesc(currentSize));
      }

      if(bytesToFree <= 0) return;

      // Instantiate priority buckets, one per block priority.
      BlockBucket bucketSingle = new BlockBucket("single", bytesToFree, blockSize,
          singleSize());
      BlockBucket bucketMulti = new BlockBucket("multi", bytesToFree, blockSize,
          multiSize());
      BlockBucket bucketMemory = new BlockBucket("memory", bytesToFree, blockSize,
          memorySize());

      // Scan the entire cache, inserting each block into its priority bucket.
      for(LruCachedBlock cachedBlock : map.values()) {
        switch(cachedBlock.getPriority()) {
          case SINGLE: {
            bucketSingle.add(cachedBlock);
            break;
          }
          case MULTI: {
            bucketMulti.add(cachedBlock);
            break;
          }
          case MEMORY: {
            bucketMemory.add(cachedBlock);
            break;
          }
        }
      }

      long bytesFreed = 0;
      if (forceInMemory || memoryFactor > 0.999f) {
        // In-memory blocks are protected: drain single and multi first, and
        // touch the memory bucket only if that is still not enough.
        long s = bucketSingle.totalSize();
        long m = bucketMulti.totalSize();
        if (bytesToFree > (s + m)) {
          // Fully drain single and multi, then take the remainder from memory.
          bytesFreed = bucketSingle.free(s);
          bytesFreed += bucketMulti.free(m);
          if (LOG.isTraceEnabled()) {
            LOG.trace("freed " + StringUtils.byteDesc(bytesFreed) +
              " from single and multi buckets");
          }
          bytesFreed += bucketMemory.free(bytesToFree - bytesFreed);
          if (LOG.isTraceEnabled()) {
            LOG.trace("freed " + StringUtils.byteDesc(bytesFreed) +
              " total from all three buckets ");
          }
        } else {
          // Free from single and multi only, aiming to leave them at roughly
          // a 1:2 (single:multi) size ratio afterwards.
          long bytesRemain = s + m - bytesToFree;
          if (3 * s <= bytesRemain) {
            // Single is already at or below a third of the remainder; free
            // everything needed from multi.
            bytesFreed = bucketMulti.free(bytesToFree);
          } else if (3 * m <= 2 * bytesRemain) {
            // Multi is already at or below two thirds of the remainder; free
            // everything needed from single.
            bytesFreed = bucketSingle.free(bytesToFree);
          } else {
            // Trim single down to a third of the remainder, then top up from multi.
            bytesFreed = bucketSingle.free(s - bytesRemain / 3);
            if (bytesFreed < bytesToFree) {
              bytesFreed += bucketMulti.free(bytesToFree - bytesFreed);
            }
          }
        }
      } else {
        // Default mode: visit buckets in increasing order of overflow past
        // their share, never taking more than a bucket's overflow and
        // splitting the outstanding target evenly among the buckets left.
        PriorityQueue<BlockBucket> bucketQueue =
          new PriorityQueue<BlockBucket>(3);

        bucketQueue.add(bucketSingle);
        bucketQueue.add(bucketMulti);
        bucketQueue.add(bucketMemory);

        int remainingBuckets = 3;

        BlockBucket bucket;
        while((bucket = bucketQueue.poll()) != null) {
          long overflow = bucket.overflow();
          if(overflow > 0) {
            long bucketBytesToFree = Math.min(overflow,
                (bytesToFree - bytesFreed) / remainingBuckets);
            bytesFreed += bucket.free(bucketBytesToFree);
          }
          remainingBuckets--;
        }
      }

      if (LOG.isTraceEnabled()) {
        long single = bucketSingle.totalSize();
        long multi = bucketMulti.totalSize();
        long memory = bucketMemory.totalSize();
        LOG.trace("Block cache LRU eviction completed; " +
          "freed=" + StringUtils.byteDesc(bytesFreed) + ", " +
          "total=" + StringUtils.byteDesc(this.size.get()) + ", " +
          "single=" + StringUtils.byteDesc(single) + ", " +
          "multi=" + StringUtils.byteDesc(multi) + ", " +
          "memory=" + StringUtils.byteDesc(memory));
      }
    } finally {
      stats.evict();
      evictionInProgress = false;
      evictionLock.unlock();
    }
  }
671
672 @Override
673 public String toString() {
674 return Objects.toStringHelper(this)
675 .add("blockCount", getBlockCount())
676 .add("currentSize", getCurrentSize())
677 .add("freeSize", getFreeSize())
678 .add("maxSize", getMaxSize())
679 .add("heapSize", heapSize())
680 .add("minSize", minSize())
681 .add("minFactor", minFactor)
682 .add("multiSize", multiSize())
683 .add("multiFactor", multiFactor)
684 .add("singleSize", singleSize())
685 .add("singleFactor", singleFactor)
686 .toString();
687 }
688
689
690
691
692
693
694
695 private class BlockBucket implements Comparable<BlockBucket> {
696
697 private final String name;
698 private LruCachedBlockQueue queue;
699 private long totalSize = 0;
700 private long bucketSize;
701
702 public BlockBucket(String name, long bytesToFree, long blockSize, long bucketSize) {
703 this.name = name;
704 this.bucketSize = bucketSize;
705 queue = new LruCachedBlockQueue(bytesToFree, blockSize);
706 totalSize = 0;
707 }
708
709 public void add(LruCachedBlock block) {
710 totalSize += block.heapSize();
711 queue.add(block);
712 }
713
714 public long free(long toFree) {
715 if (LOG.isTraceEnabled()) {
716 LOG.trace("freeing " + StringUtils.byteDesc(toFree) + " from " + this);
717 }
718 LruCachedBlock cb;
719 long freedBytes = 0;
720 while ((cb = queue.pollLast()) != null) {
721 freedBytes += evictBlock(cb, true);
722 if (freedBytes >= toFree) {
723 return freedBytes;
724 }
725 }
726 if (LOG.isTraceEnabled()) {
727 LOG.trace("freed " + StringUtils.byteDesc(freedBytes) + " from " + this);
728 }
729 return freedBytes;
730 }
731
732 public long overflow() {
733 return totalSize - bucketSize;
734 }
735
736 public long totalSize() {
737 return totalSize;
738 }
739
740 public int compareTo(BlockBucket that) {
741 if(this.overflow() == that.overflow()) return 0;
742 return this.overflow() > that.overflow() ? 1 : -1;
743 }
744
745 @Override
746 public boolean equals(Object that) {
747 if (that == null || !(that instanceof BlockBucket)){
748 return false;
749 }
750 return compareTo((BlockBucket)that) == 0;
751 }
752
753 @Override
754 public int hashCode() {
755 return Objects.hashCode(name, bucketSize, queue, totalSize);
756 }
757
758 @Override
759 public String toString() {
760 return Objects.toStringHelper(this)
761 .add("name", name)
762 .add("totalSize", StringUtils.byteDesc(totalSize))
763 .add("bucketSize", StringUtils.byteDesc(bucketSize))
764 .toString();
765 }
766 }
767
768
769
770
771
  /**
   * Get the maximum size of this cache.
   *
   * @return max size in bytes
   */
  public long getMaxSize() {
    return this.maxSize;
  }

  /** @return current bytes used by the cache, including the fixed overhead */
  @Override
  public long getCurrentSize() {
    return this.size.get();
  }

  /** @return bytes still available before the configured maximum is reached */
  @Override
  public long getFreeSize() {
    return getMaxSize() - getCurrentSize();
  }

  /** @return the capacity of this cache (same as {@link #getMaxSize()}) */
  @Override
  public long size() {
    return getMaxSize();
  }

  /** @return the number of blocks currently cached */
  @Override
  public long getBlockCount() {
    return this.elements.get();
  }

  /** @return the eviction thread, or null if evictions run inline */
  EvictionThread getEvictionThread() {
    return this.evictionThread;
  }
799
800
801
802
803
804
805
806 static class EvictionThread extends HasThread {
807 private WeakReference<LruBlockCache> cache;
808 private volatile boolean go = true;
809
810 private boolean enteringRun = false;
811
812 public EvictionThread(LruBlockCache cache) {
813 super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
814 setDaemon(true);
815 this.cache = new WeakReference<LruBlockCache>(cache);
816 }
817
818 @Override
819 public void run() {
820 enteringRun = true;
821 while (this.go) {
822 synchronized(this) {
823 try {
824 this.wait(1000 * 10
825 } catch(InterruptedException e) {
826 LOG.warn("Interrupted eviction thread ", e);
827 Thread.currentThread().interrupt();
828 }
829 }
830 LruBlockCache cache = this.cache.get();
831 if (cache == null) break;
832 cache.evict();
833 }
834 }
835
836 @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NN_NAKED_NOTIFY",
837 justification="This is what we want")
838 public void evict() {
839 synchronized(this) {
840 this.notifyAll();
841 }
842 }
843
844 synchronized void shutdown() {
845 this.go = false;
846 this.notifyAll();
847 }
848
849
850
851
852 boolean isEnteringRun() {
853 return this.enteringRun;
854 }
855 }
856
857
858
859
  /*
   * Statistics thread.  Periodically logs the cache statistics; scheduled on
   * the stats executor by the constructor.
   */
  static class StatisticsThread extends Thread {
    private final LruBlockCache lru;

    public StatisticsThread(LruBlockCache lru) {
      super("LruBlockCacheStats");
      setDaemon(true);
      this.lru = lru;
    }

    @Override
    public void run() {
      lru.logStats();
    }
  }
874
  /** Logs a one-line INFO summary of sizes, hit ratios and eviction counts. */
  public void logStats() {
    // Log size
    long totalSize = heapSize();
    long freeSize = maxSize - totalSize;
    // NOTE(review): the "0," literal and trailing separators below yield slightly
    // inconsistent punctuation; preserved as-is since log scrapers may depend on
    // the exact format.
    LruBlockCache.LOG.info("totalSize=" + StringUtils.byteDesc(totalSize) + ", " +
        "freeSize=" + StringUtils.byteDesc(freeSize) + ", " +
        "max=" + StringUtils.byteDesc(this.maxSize) + ", " +
        "blockCount=" + getBlockCount() + ", " +
        "accesses=" + stats.getRequestCount() + ", " +
        "hits=" + stats.getHitCount() + ", " +
        "hitRatio=" + (stats.getHitCount() == 0 ?
          "0" : (StringUtils.formatPercent(stats.getHitRatio(), 2)+ ", ")) + ", " +
        "cachingAccesses=" + stats.getRequestCachingCount() + ", " +
        "cachingHits=" + stats.getHitCachingCount() + ", " +
        "cachingHitsRatio=" + (stats.getHitCachingCount() == 0 ?
          "0,": (StringUtils.formatPercent(stats.getHitCachingRatio(), 2) + ", ")) +
        "evictions=" + stats.getEvictionCount() + ", " +
        "evicted=" + stats.getEvictedCount() + ", " +
        "evictedPerRun=" + stats.evictedPerEviction());
  }
895
896
897
898
899
900
901
  /**
   * Get counter statistics for this cache.
   *
   * <p>Includes accesses, hits, misses, evictions and eviction runs.
   */
  public CacheStats getStats() {
    return this.stats;
  }

  /** Fixed per-instance heap overhead: longs, references, floats, booleans, object header. */
  public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
      (3 * Bytes.SIZEOF_LONG) + (10 * ClassSize.REFERENCE) +
      (5 * Bytes.SIZEOF_FLOAT) + (2 * Bytes.SIZEOF_BOOLEAN)
      + ClassSize.OBJECT);

  /** @return current bytes used by the cache (the size gauge, not the capacity) */
  @Override
  public long heapSize() {
    return getCurrentSize();
  }

  /**
   * Estimates the fixed plus map overhead, in bytes, for a cache of the given
   * geometry; the entry count matches the constructors' 1.2 * maxSize/blockSize
   * initial map sizing.
   */
  public static long calculateOverhead(long maxSize, long blockSize, int concurrency){
    return CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP +
        ((long)Math.ceil(maxSize*1.2/blockSize)
            * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        ((long)concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
  }
923
924 @Override
925 public Iterator<CachedBlock> iterator() {
926 final Iterator<LruCachedBlock> iterator = map.values().iterator();
927
928 return new Iterator<CachedBlock>() {
929 private final long now = System.nanoTime();
930
931 @Override
932 public boolean hasNext() {
933 return iterator.hasNext();
934 }
935
936 @Override
937 public CachedBlock next() {
938 final LruCachedBlock b = iterator.next();
939 return new CachedBlock() {
940 @Override
941 public String toString() {
942 return BlockCacheUtil.toString(this, now);
943 }
944
945 @Override
946 public BlockPriority getBlockPriority() {
947 return b.getPriority();
948 }
949
950 @Override
951 public BlockType getBlockType() {
952 return b.getBuffer().getBlockType();
953 }
954
955 @Override
956 public long getOffset() {
957 return b.getCacheKey().getOffset();
958 }
959
960 @Override
961 public long getSize() {
962 return b.getBuffer().heapSize();
963 }
964
965 @Override
966 public long getCachedTime() {
967 return b.getCachedTime();
968 }
969
970 @Override
971 public String getFilename() {
972 return b.getCacheKey().getHfileName();
973 }
974
975 @Override
976 public int compareTo(CachedBlock other) {
977 int diff = this.getFilename().compareTo(other.getFilename());
978 if (diff != 0) return diff;
979 diff = (int)(this.getOffset() - other.getOffset());
980 if (diff != 0) return diff;
981 if (other.getCachedTime() < 0 || this.getCachedTime() < 0) {
982 throw new IllegalStateException("" + this.getCachedTime() + ", " +
983 other.getCachedTime());
984 }
985 return (int)(other.getCachedTime() - this.getCachedTime());
986 }
987
988 @Override
989 public int hashCode() {
990 return b.hashCode();
991 }
992
993 @Override
994 public boolean equals(Object obj) {
995 if (obj instanceof CachedBlock) {
996 CachedBlock cb = (CachedBlock)obj;
997 return compareTo(cb) == 0;
998 } else {
999 return false;
1000 }
1001 }
1002 };
1003 }
1004
1005 @Override
1006 public void remove() {
1007 throw new UnsupportedOperationException();
1008 }
1009 };
1010 }
1011
1012
1013
  // Simple calculators of sizes given factors and maxSize

  /** Usage level above which an eviction run is triggered. */
  long acceptableSize() {
    return (long)Math.floor(this.maxSize * this.acceptableFactor);
  }
  /** Usage level an eviction run frees down to. */
  private long minSize() {
    return (long)Math.floor(this.maxSize * this.minFactor);
  }
  /** Share of the post-eviction size reserved for single-access blocks. */
  private long singleSize() {
    return (long)Math.floor(this.maxSize * this.singleFactor * this.minFactor);
  }
  /** Share of the post-eviction size reserved for multi-access blocks. */
  private long multiSize() {
    return (long)Math.floor(this.maxSize * this.multiFactor * this.minFactor);
  }
  /** Share of the post-eviction size reserved for in-memory blocks. */
  private long memorySize() {
    return (long)Math.floor(this.maxSize * this.memoryFactor * this.minFactor);
  }
1029
1030 public void shutdown() {
1031 if (victimHandler != null)
1032 victimHandler.shutdown();
1033 this.scheduleThreadPool.shutdown();
1034 for (int i = 0; i < 10; i++) {
1035 if (!this.scheduleThreadPool.isShutdown()) {
1036 try {
1037 Thread.sleep(10);
1038 } catch (InterruptedException e) {
1039 LOG.warn("Interrupted while sleeping");
1040 Thread.currentThread().interrupt();
1041 break;
1042 }
1043 }
1044 }
1045
1046 if (!this.scheduleThreadPool.isShutdown()) {
1047 List<Runnable> runnables = this.scheduleThreadPool.shutdownNow();
1048 LOG.debug("Still running " + runnables);
1049 }
1050 this.evictionThread.shutdown();
1051 }
1052
1053
  /**
   * Clears the cache. Used in tests.
   * NOTE(review): resets the element counter but not the size gauge, which will
   * still include the cleared blocks — confirm acceptable for test usage.
   */
  @VisibleForTesting
  public void clearCache() {
    this.map.clear();
    this.elements.set(0);
  }
1059
1060
1061
1062
1063
1064 @VisibleForTesting
1065 SortedSet<String> getCachedFileNamesForTest() {
1066 SortedSet<String> fileNames = new TreeSet<String>();
1067 for (BlockCacheKey cacheKey : map.keySet()) {
1068 fileNames.add(cacheKey.getHfileName());
1069 }
1070 return fileNames;
1071 }
1072
1073 @VisibleForTesting
1074 Map<BlockType, Integer> getBlockTypeCountsForTest() {
1075 Map<BlockType, Integer> counts =
1076 new EnumMap<BlockType, Integer>(BlockType.class);
1077 for (LruCachedBlock cb : map.values()) {
1078 BlockType blockType = ((Cacheable)cb.getBuffer()).getBlockType();
1079 Integer count = counts.get(blockType);
1080 counts.put(blockType, (count == null ? 0 : count) + 1);
1081 }
1082 return counts;
1083 }
1084
1085 @VisibleForTesting
1086 public Map<DataBlockEncoding, Integer> getEncodingCountsForTest() {
1087 Map<DataBlockEncoding, Integer> counts =
1088 new EnumMap<DataBlockEncoding, Integer>(DataBlockEncoding.class);
1089 for (LruCachedBlock block : map.values()) {
1090 DataBlockEncoding encoding =
1091 ((HFileBlock) block.getBuffer()).getDataBlockEncoding();
1092 Integer count = counts.get(encoding);
1093 counts.put(encoding, (count == null ? 0 : count) + 1);
1094 }
1095 return counts;
1096 }
1097
1098 public void setVictimCache(BlockCache handler) {
1099 assert victimHandler == null;
1100 victimHandler = handler;
1101 }
1102
  /** Exposes the backing map. Used in testing only. */
  @VisibleForTesting
  Map<BlockCacheKey, LruCachedBlock> getMapForTests() {
    return map;
  }

  /** @return the victim (L2) cache, or null if none was registered */
  BlockCache getVictimHandler() {
    return this.victimHandler;
  }

  /** This cache has no sub-caches; always returns null. */
  @Override
  public BlockCache[] getBlockCaches() {
    return null;
  }
1116 }