/**
 * Copyright 2009 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Map;
import java.util.Random;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.hfile.LruBlockCache.EvictionThread;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
import org.apache.hadoop.hbase.regionserver.metrics.TestSchemaMetrics;
import org.apache.hadoop.hbase.util.ClassSize;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

/**
 * Tests the concurrent LruBlockCache.<p>
 *
 * These tests ensure the cache grows and shrinks in size properly,
 * that evictions run when they are supposed to and evict the right blocks,
 * and that cached blocks remain accessible when they are expected to be.
 */
@RunWith(Parameterized.class)
@Category(MediumTests.class)
public class TestLruBlockCache {

  private Map<String, Long> startingMetrics;
  private final HBaseTestingUtility TEST_UTIL =
      new HBaseTestingUtility();

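  /**
   * Each test is run twice by the {@link Parameterized} runner: once with table names included
   * in schema metric keys and once without (see {@link TestSchemaMetrics#parameters()}).
   */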
  public TestLruBlockCache(boolean useTableName) {
    SchemaMetrics.setUseTableNameInTest(useTableName);
  }

  @Parameters
  public static Collection<Object[]> parameters() {
    return TestSchemaMetrics.parameters();
  }

  @Before
  public void setUp() throws Exception {
    startingMetrics = SchemaMetrics.getMetricsSnapshot();
  }

  @After
  public void tearDown() throws Exception {
    SchemaMetrics.validateMetricChanges(startingMetrics);
  }

  @Test
  public void testBackgroundEvictionThread() throws Exception {
    long maxSize = 100000;
    long blockSize = calculateBlockSizeDefault(maxSize, 9); // room for 9, will evict

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, TEST_UTIL.getConfiguration());

    CachedItem [] blocks = generateFixedBlocks(10, blockSize, "block");

    EvictionThread evictionThread = cache.getEvictionThread();
    assertTrue(evictionThread != null);

    // Make sure eviction thread has entered run method
    while (!evictionThread.isEnteringRun()) {
      Thread.sleep(1);
    }

    // Add all the blocks
    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
    }

    // Let the eviction run
    int n = 0;
    while(cache.getEvictionCount() == 0) {
      Thread.sleep(200);
      assertTrue(n++ < 20);
    }
    System.out.println("Background Evictions run: " + cache.getEvictionCount());

    // A single eviction run should have occurred
    assertEquals(1, cache.getEvictionCount());
  }

  @Test
  public void testCacheSimple() throws Exception {

    long maxSize = 1000000;
    long blockSize = calculateBlockSizeDefault(maxSize, 101);

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, TEST_UTIL.getConfiguration());

    CachedItem [] blocks = generateRandomBlocks(100, blockSize);

    long expectedCacheSize = cache.heapSize();

    // Confirm empty
    for (CachedItem block : blocks) {
      assertTrue(cache.getBlock(block.cacheKey, true, false) == null);
    }

    // Add blocks
    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
      expectedCacheSize += block.cacheBlockHeapSize();
    }

    // Verify correctly calculated cache heap size
    assertEquals(expectedCacheSize, cache.heapSize());

    // Check if all blocks are properly cached and retrieved
    for (CachedItem block : blocks) {
      HeapSize buf = cache.getBlock(block.cacheKey, true, false);
      assertTrue(buf != null);
      assertEquals(block.heapSize(), buf.heapSize());
    }

    // Re-add same blocks and ensure nothing has changed
    for (CachedItem block : blocks) {
      try {
        cache.cacheBlock(block.cacheKey, block);
        fail("Cache should not allow re-caching a block");
      } catch(RuntimeException re) {
        // expected
      }
    }

    // Verify correctly calculated cache heap size
    assertEquals(expectedCacheSize, cache.heapSize());

    // Check if all blocks are properly cached and retrieved
    for (CachedItem block : blocks) {
      HeapSize buf = cache.getBlock(block.cacheKey, true, false);
      assertTrue(buf != null);
      assertEquals(block.heapSize(), buf.heapSize());
    }

    // Expect no evictions
    assertEquals(0, cache.getEvictionCount());
    Thread t = new LruBlockCache.StatisticsThread(cache);
    t.start();
    t.join();
  }

  @Test
  public void testCacheEvictionSimple() throws Exception {

    long maxSize = 100000;
    long blockSize = calculateBlockSizeDefault(maxSize, 10);

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false, TEST_UTIL.getConfiguration());

    CachedItem [] blocks = generateFixedBlocks(10, blockSize, "block");

    long expectedCacheSize = cache.heapSize();

    // Add all the blocks
    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
      expectedCacheSize += block.cacheBlockHeapSize();
    }

    // A single eviction run should have occurred
    assertEquals(1, cache.getEvictionCount());

    // Our expected size overruns acceptable limit
    assertTrue(expectedCacheSize >
      (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));

    // But the cache did not grow beyond max
    assertTrue(cache.heapSize() < maxSize);

    // And is still below the acceptable limit
    assertTrue(cache.heapSize() <
        (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));

    // All blocks except block 0 and 1 should be in the cache
    assertTrue(cache.getBlock(blocks[0].cacheKey, true, false) == null);
    assertTrue(cache.getBlock(blocks[1].cacheKey, true, false) == null);
    for(int i=2;i<blocks.length;i++) {
      assertEquals(blocks[i],
          cache.getBlock(blocks[i].cacheKey, true, false));
    }
  }

  @Test
  public void testCacheEvictionTwoPriorities() throws Exception {

    long maxSize = 100000;
    long blockSize = calculateBlockSizeDefault(maxSize, 10);

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false, TEST_UTIL.getConfiguration());

    CachedItem [] singleBlocks = generateFixedBlocks(5, 10000, "single");
    CachedItem [] multiBlocks = generateFixedBlocks(5, 10000, "multi");

    long expectedCacheSize = cache.heapSize();

    // Add and get the multi blocks
    for (CachedItem block : multiBlocks) {
      cache.cacheBlock(block.cacheKey, block);
      expectedCacheSize += block.cacheBlockHeapSize();
      assertEquals(block, cache.getBlock(block.cacheKey, true, false));
    }

    // Add the single blocks (no get)
    for (CachedItem block : singleBlocks) {
      cache.cacheBlock(block.cacheKey, block);
      expectedCacheSize += block.heapSize();
    }

    // A single eviction run should have occurred
    assertEquals(1, cache.getEvictionCount());

    // We expect two entries evicted
    assertEquals(2, cache.getEvictedCount());

    // Our expected size overruns acceptable limit
    assertTrue(expectedCacheSize >
      (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));

    // But the cache did not grow beyond max
    assertTrue(cache.heapSize() <= maxSize);

    // And is now below the acceptable limit
    assertTrue(cache.heapSize() <=
        (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));

    // We expect fairness across the two priorities.
    // This test makes multi go barely over its limit, in-memory
    // empty, and the rest in single.  One single eviction and
    // one multi eviction expected.
    assertTrue(cache.getBlock(singleBlocks[0].cacheKey, true, false) == null);
    assertTrue(cache.getBlock(multiBlocks[0].cacheKey, true, false) == null);

    // And all others to be cached
    for(int i=1;i<4;i++) {
      assertEquals(singleBlocks[i],
          cache.getBlock(singleBlocks[i].cacheKey, true, false));
      assertEquals(multiBlocks[i],
          cache.getBlock(multiBlocks[i].cacheKey, true, false));
    }
  }

  @Test
  public void testCacheEvictionThreePriorities() throws Exception {

    long maxSize = 100000;
    long blockSize = calculateBlockSize(maxSize, 10);

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
        (int)Math.ceil(1.2*maxSize/blockSize),
        LruBlockCache.DEFAULT_LOAD_FACTOR,
        LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
        0.98f, // min
        0.99f, // acceptable
        0.33f, // single
        0.33f, // multi
        0.34f);// memory

    CachedItem [] singleBlocks = generateFixedBlocks(5, blockSize, "single");
    CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
    CachedItem [] memoryBlocks = generateFixedBlocks(5, blockSize, "memory");

    long expectedCacheSize = cache.heapSize();

    // Add 3 blocks from each priority
    for(int i=0;i<3;i++) {

      // Just add single blocks
      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
      expectedCacheSize += singleBlocks[i].cacheBlockHeapSize();

      // Add and get multi blocks
      cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]);
      expectedCacheSize += multiBlocks[i].cacheBlockHeapSize();
      cache.getBlock(multiBlocks[i].cacheKey, true, false);

      // Add memory blocks as such
      cache.cacheBlock(memoryBlocks[i].cacheKey, memoryBlocks[i], true);
      expectedCacheSize += memoryBlocks[i].cacheBlockHeapSize();
    }

    // Do not expect any evictions yet
    assertEquals(0, cache.getEvictionCount());

    // Verify cache size
    assertEquals(expectedCacheSize, cache.heapSize());

    // Insert a single block, oldest single should be evicted
    cache.cacheBlock(singleBlocks[3].cacheKey, singleBlocks[3]);

    // Single eviction, one thing evicted
    assertEquals(1, cache.getEvictionCount());
    assertEquals(1, cache.getEvictedCount());

    // Verify oldest single block is the one evicted
    assertEquals(null, cache.getBlock(singleBlocks[0].cacheKey, true, false));

    // Change the oldest remaining single block to a multi
    cache.getBlock(singleBlocks[1].cacheKey, true, false);

    // Insert another single block
    cache.cacheBlock(singleBlocks[4].cacheKey, singleBlocks[4]);

    // Two evictions, two evicted.
    assertEquals(2, cache.getEvictionCount());
    assertEquals(2, cache.getEvictedCount());

    // Oldest multi block should be evicted now
    assertEquals(null, cache.getBlock(multiBlocks[0].cacheKey, true, false));

    // Insert another memory block
    cache.cacheBlock(memoryBlocks[3].cacheKey, memoryBlocks[3], true);

    // Three evictions, three evicted.
    assertEquals(3, cache.getEvictionCount());
    assertEquals(3, cache.getEvictedCount());

    // Oldest memory block should be evicted now
    assertEquals(null, cache.getBlock(memoryBlocks[0].cacheKey, true, false));

    // Add a block that is three times as big (should force three evictions)
    CachedItem [] bigBlocks = generateFixedBlocks(3, blockSize*3, "big");
    cache.cacheBlock(bigBlocks[0].cacheKey, bigBlocks[0]);

    // Four evictions, six evicted (inserted block 3X size, expect +3 evicted)
    assertEquals(4, cache.getEvictionCount());
    assertEquals(6, cache.getEvictedCount());

    // Expect three remaining singles to be evicted
    assertEquals(null, cache.getBlock(singleBlocks[2].cacheKey, true, false));
    assertEquals(null, cache.getBlock(singleBlocks[3].cacheKey, true, false));
    assertEquals(null, cache.getBlock(singleBlocks[4].cacheKey, true, false));

    // Make the big block a multi block
    cache.getBlock(bigBlocks[0].cacheKey, true, false);

    // Cache another single big block
    cache.cacheBlock(bigBlocks[1].cacheKey, bigBlocks[1]);

    // Five evictions, nine evicted (3 new)
    assertEquals(5, cache.getEvictionCount());
    assertEquals(9, cache.getEvictedCount());

    // Expect three remaining multis to be evicted
    assertEquals(null, cache.getBlock(singleBlocks[1].cacheKey, true, false));
    assertEquals(null, cache.getBlock(multiBlocks[1].cacheKey, true, false));
    assertEquals(null, cache.getBlock(multiBlocks[2].cacheKey, true, false));

    // Cache a big memory block
    cache.cacheBlock(bigBlocks[2].cacheKey, bigBlocks[2], true);

    // Six evictions, twelve evicted (3 new)
    assertEquals(6, cache.getEvictionCount());
    assertEquals(12, cache.getEvictedCount());

    // Expect three remaining in-memory to be evicted
    assertEquals(null, cache.getBlock(memoryBlocks[1].cacheKey, true, false));
    assertEquals(null, cache.getBlock(memoryBlocks[2].cacheKey, true, false));
    assertEquals(null, cache.getBlock(memoryBlocks[3].cacheKey, true, false));
  }

  // test scan resistance
  @Test
  public void testScanResistance() throws Exception {

    long maxSize = 100000;
    long blockSize = calculateBlockSize(maxSize, 10);

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
        (int)Math.ceil(1.2*maxSize/blockSize),
        LruBlockCache.DEFAULT_LOAD_FACTOR,
        LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
        0.66f, // min
        0.99f, // acceptable
        0.33f, // single
        0.33f, // multi
        0.34f);// memory

    CachedItem [] singleBlocks = generateFixedBlocks(20, blockSize, "single");
    CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");

    // Add 5 multi blocks
    for (CachedItem block : multiBlocks) {
      cache.cacheBlock(block.cacheKey, block);
      cache.getBlock(block.cacheKey, true, false);
    }

    // Add 5 single blocks
    for(int i=0;i<5;i++) {
      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
    }

    // An eviction ran
    assertEquals(1, cache.getEvictionCount());

    // To drop down to 2/3 capacity, we'll need to evict 4 blocks
    assertEquals(4, cache.getEvictedCount());

    // Should have been taken off equally from single and multi
    assertEquals(null, cache.getBlock(singleBlocks[0].cacheKey, true, false));
    assertEquals(null, cache.getBlock(singleBlocks[1].cacheKey, true, false));
    assertEquals(null, cache.getBlock(multiBlocks[0].cacheKey, true, false));
    assertEquals(null, cache.getBlock(multiBlocks[1].cacheKey, true, false));

    // Let's keep "scanning" by adding single blocks.  From here on we only
    // expect evictions from the single bucket.

    // Every time we reach 10 total blocks (every 4 inserts) we get 4 single
    // blocks evicted.  Inserting 13 blocks should yield 3 more evictions and
    // 12 more evicted.

    for(int i=5;i<18;i++) {
      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
    }

    // 4 total evictions, 16 total evicted
    assertEquals(4, cache.getEvictionCount());
    assertEquals(16, cache.getEvictedCount());

    // Should now have 7 total blocks
    assertEquals(7, cache.size());
  }

  // test setMaxSize
  @Test
  public void testResizeBlockCache() throws Exception {

    long maxSize = 300000;
    long blockSize = calculateBlockSize(maxSize, 31);

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
        (int)Math.ceil(1.2*maxSize/blockSize),
        LruBlockCache.DEFAULT_LOAD_FACTOR,
        LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
        0.98f, // min
        0.99f, // acceptable
        0.33f, // single
        0.33f, // multi
        0.34f);// memory

    CachedItem [] singleBlocks = generateFixedBlocks(10, blockSize, "single");
    CachedItem [] multiBlocks = generateFixedBlocks(10, blockSize, "multi");
    CachedItem [] memoryBlocks = generateFixedBlocks(10, blockSize, "memory");

    // Add all blocks from all priorities
    for(int i=0;i<10;i++) {

      // Just add single blocks
      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);

      // Add and get multi blocks
      cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]);
      cache.getBlock(multiBlocks[i].cacheKey, true, false);

      // Add memory blocks as such
      cache.cacheBlock(memoryBlocks[i].cacheKey, memoryBlocks[i], true);
    }

    // Do not expect any evictions yet
    assertEquals(0, cache.getEvictionCount());

    // Resize to half capacity plus an extra block (otherwise we evict an extra)
    cache.setMaxSize((long)(maxSize * 0.5f));

    // Should have run a single eviction
    assertEquals(1, cache.getEvictionCount());

    // And we expect 1/2 of the blocks to be evicted
    assertEquals(15, cache.getEvictedCount());

    // And the oldest 5 blocks from each category should be gone
    for(int i=0;i<5;i++) {
      assertEquals(null, cache.getBlock(singleBlocks[i].cacheKey, true, false));
      assertEquals(null, cache.getBlock(multiBlocks[i].cacheKey, true, false));
      assertEquals(null, cache.getBlock(memoryBlocks[i].cacheKey, true, false));
    }

    // And the newest 5 blocks should still be accessible
    for(int i=5;i<10;i++) {
      assertEquals(singleBlocks[i], cache.getBlock(singleBlocks[i].cacheKey, true, false));
      assertEquals(multiBlocks[i], cache.getBlock(multiBlocks[i].cacheKey, true, false));
      assertEquals(memoryBlocks[i], cache.getBlock(memoryBlocks[i].cacheKey, true, false));
    }
  }

  // test metricsPastNPeriods
  @Test
  public void testPastNPeriodsMetrics() throws Exception {
    double delta = 0.01;

    // 3 total periods
    CacheStats stats = new CacheStats(3);

    // No accesses, should be 0
    stats.rollMetricsPeriod();
    assertEquals(0.0, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 1, 1 hit caching, 1 hit non-caching, 2 miss non-caching
    // should be (2/4)=0.5 and (1/1)=1
    stats.hit(false);
    stats.hit(true);
    stats.miss(false);
    stats.miss(false);
    stats.rollMetricsPeriod();
    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(1.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 2, 1 miss caching, 3 miss non-caching
    // should be (2/8)=0.25 and (1/2)=0.5
    stats.miss(true);
    stats.miss(false);
    stats.miss(false);
    stats.miss(false);
    stats.rollMetricsPeriod();
    assertEquals(0.25, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.5, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 3, 2 hits of each type
    // should be (6/12)=0.5 and (3/4)=0.75
    stats.hit(false);
    stats.hit(true);
    stats.hit(false);
    stats.hit(true);
    stats.rollMetricsPeriod();
    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.75, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 4, evict period 1, two caching misses
    // should be (4/10)=0.4 and (2/5)=0.4
    stats.miss(true);
    stats.miss(true);
    stats.rollMetricsPeriod();
    assertEquals(0.4, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.4, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 5, evict period 2, 2 caching misses, 2 non-caching hit
    // should be (6/10)=0.6 and (2/6)=1/3
    stats.miss(true);
    stats.miss(true);
    stats.hit(false);
    stats.hit(false);
    stats.rollMetricsPeriod();
    assertEquals(0.6, stats.getHitRatioPastNPeriods(), delta);
    assertEquals((double)1/3, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 6, evict period 3
    // should be (2/6)=1/3 and (0/4)=0
    stats.rollMetricsPeriod();
    assertEquals((double)1/3, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 7, evict period 4
    // should be (2/4)=0.5 and (0/2)=0
    stats.rollMetricsPeriod();
    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 8, evict period 5
    // should be 0 and 0
    stats.rollMetricsPeriod();
    assertEquals(0.0, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 9, one of each
    // should be (2/4)=0.5 and (1/2)=0.5
    stats.miss(true);
    stats.miss(false);
    stats.hit(true);
    stats.hit(false);
    stats.rollMetricsPeriod();
    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.5, stats.getHitCachingRatioPastNPeriods(), delta);
  }

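  /** Creates {@code numBlocks} blocks of the given size, with cache keys "pfx0" .. "pfx(numBlocks-1)". */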
  private CachedItem [] generateFixedBlocks(int numBlocks, int size, String pfx) {
    CachedItem [] blocks = new CachedItem[numBlocks];
    for(int i=0;i<numBlocks;i++) {
      blocks[i] = new CachedItem(pfx + i, size);
    }
    return blocks;
  }

  private CachedItem [] generateFixedBlocks(int numBlocks, long size, String pfx) {
    return generateFixedBlocks(numBlocks, (int)size, pfx);
  }

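  /** Creates {@code numBlocks} blocks with random sizes between 1 and {@code maxSize} bytes. */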
  private CachedItem [] generateRandomBlocks(int numBlocks, long maxSize) {
    CachedItem [] blocks = new CachedItem[numBlocks];
    Random r = new Random();
    for(int i=0;i<numBlocks;i++) {
      blocks[i] = new CachedItem("block" + i, r.nextInt((int)maxSize)+1);
    }
    return blocks;
  }

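  /**
   * Computes a block size such that {@code numBlocks} blocks, together with the cache's fixed
   * and per-block overhead, fit just inside {@code maxSize} (the raw size is derated by 1%).
   */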
  private long calculateBlockSize(long maxSize, int numBlocks) {
    long roughBlockSize = maxSize / numBlocks;
    int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
        ClassSize.CONCURRENT_HASHMAP +
        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
    long negateBlockSize = (long)(totalOverhead/numEntries);
    negateBlockSize += CachedBlock.PER_BLOCK_OVERHEAD;
    return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*0.99f));
  }

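  /**
   * Like {@link #calculateBlockSize(long, int)} but derates by
   * {@code LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR}, so {@code numBlocks} blocks fill the cache
   * to roughly its acceptable watermark and caching one more block triggers an eviction.
   */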
  private long calculateBlockSizeDefault(long maxSize, int numBlocks) {
    long roughBlockSize = maxSize / numBlocks;
    int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
        ClassSize.CONCURRENT_HASHMAP +
        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
    long negateBlockSize = totalOverhead / numEntries;
    negateBlockSize += CachedBlock.PER_BLOCK_OVERHEAD;
    return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*
        LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
  }

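  /**
   * A minimal {@link Cacheable} used as the test payload.  It reports a fixed, aligned heap size
   * and is never actually serialized by these tests.
   */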
  private static class CachedItem implements Cacheable {
    BlockCacheKey cacheKey;
    int size;

    CachedItem(String blockName, int size) {
      this.cacheKey = new BlockCacheKey(blockName, 0);
      this.size = size;
    }

    /** The size of this item reported to the block cache layer */
    @Override
    public long heapSize() {
      return ClassSize.align(size);
    }

    /** Size of the cache block holding this item. Used for verification. */
    public long cacheBlockHeapSize() {
      return CachedBlock.PER_BLOCK_OVERHEAD
          + ClassSize.align(cacheKey.heapSize())
          + ClassSize.align(size);
    }

    @Override
    public BlockType getBlockType() {
      return BlockType.DATA;
    }

    @Override
    public SchemaMetrics getSchemaMetrics() {
      return SchemaMetrics.getUnknownInstanceForTest();
    }

    @Override
    public int getSerializedLength() {
      return 0;
    }

    @Override
    public CacheableDeserializer<Cacheable> getDeserializer() {
      return null;
    }

    @Override
    public void serialize(ByteBuffer destination) {
    }

  }

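  /** Verifies that the test does not leak resources such as threads. */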
  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}