
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.Random;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.testclassification.SmallTests;

import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;

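/**
 * Verifies that enabling {@link CacheConfig#PREFETCH_BLOCKS_ON_OPEN_KEY} causes the data and
 * index blocks of a newly written store file to be loaded into the block cache as soon as the
 * file is opened.
 */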
@Category(SmallTests.class)
public class TestPrefetch {

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  private static final int NUM_VALID_KEY_TYPES = KeyValue.Type.values().length - 2;
  private static final int DATA_BLOCK_SIZE = 2048;
  private static final int NUM_KV = 1000;
  private static final Random RNG = new Random();

  private Configuration conf;
  private CacheConfig cacheConf;
  private FileSystem fs;

  @Before
  public void setUp() throws IOException {
    conf = TEST_UTIL.getConfiguration();
    // Use HFile format version 3 and turn on prefetching of blocks when a file is opened.
    conf.setInt(HFile.FORMAT_VERSION_KEY, 3);
    conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);
    fs = HFileSystem.get(conf);
    // Make sure the block cache is not disabled before building the CacheConfig.
    CacheConfig.blockCacheDisabled = false;
    cacheConf = new CacheConfig(conf);
  }

  @Test(timeout=60000)
  public void testPrefetch() throws Exception {
    Path storeFile = writeStoreFile();
    readStoreFile(storeFile);
  }

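  /**
   * Opens the store file, waits for the background prefetch to complete, and then verifies
   * that every data and index block ahead of the load-on-open section is already in the
   * block cache.
   */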
  private void readStoreFile(Path storeFilePath) throws Exception {
    // Open the file. With format version 3 configured, the reader is an HFileReaderV3.
    HFileReaderV3 reader = (HFileReaderV3) HFile.createReader(fs,
      storeFilePath, cacheConf, conf);

    // Prefetching runs on a background thread, so wait until it reports completion.
    while (!reader.prefetchComplete()) {
      // Sleep for a bit
      Thread.sleep(1000);
    }

    // Check that all of the data blocks were preloaded
    BlockCache blockCache = cacheConf.getBlockCache();
    long offset = 0;
    HFileBlock prevBlock = null;
    while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
      long onDiskSize = -1;
      if (prevBlock != null) {
        onDiskSize = prevBlock.getNextBlockOnDiskSizeWithHeader();
      }
      // Read the block straight from the file without caching it, then check whether the
      // prefetch already placed it in the block cache.
      HFileBlock block = reader.readBlock(offset, onDiskSize, false, true, false, true, null,
        null);
      BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(), offset);
      boolean isCached = blockCache.getBlock(blockCacheKey, true, false, true) != null;
      if (block.getBlockType() == BlockType.DATA ||
          block.getBlockType() == BlockType.ROOT_INDEX ||
          block.getBlockType() == BlockType.INTERMEDIATE_INDEX) {
        assertTrue(isCached);
      }
      prevBlock = block;
      offset += block.getOnDiskSizeWithHeader();
    }
  }

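  /**
   * Writes a store file containing NUM_KV randomly generated KeyValues and returns the path
   * of the new file.
   */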
  private Path writeStoreFile() throws IOException {
    Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), "TestPrefetch");
    HFileContext meta = new HFileContextBuilder()
      .withBlockSize(DATA_BLOCK_SIZE)
      .build();
    StoreFile.Writer sfw = new StoreFile.WriterBuilder(conf, cacheConf, fs)
      .withOutputDir(storeFileParentDir)
      .withComparator(KeyValue.COMPARATOR)
      .withFileContext(meta)
      .build();

    final int rowLen = 32;
    for (int i = 0; i < NUM_KV; ++i) {
      byte[] k = TestHFileWriterV2.randomOrderedKey(RNG, i);
      byte[] v = TestHFileWriterV2.randomValue(RNG);
      // Carve the random key bytes into a 32-byte row, a random-length family, and the rest
      // as the qualifier.
      int cfLen = RNG.nextInt(k.length - rowLen + 1);
      KeyValue kv = new KeyValue(
          k, 0, rowLen,
          k, rowLen, cfLen,
          k, rowLen + cfLen, k.length - rowLen - cfLen,
          RNG.nextLong(),
          generateKeyType(RNG),
          v, 0, v.length);
      sfw.append(kv);
    }

    sfw.close();
    return sfw.getPath();
  }

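  /**
   * Picks a random valid KeyValue type: Put roughly half of the time, otherwise any type
   * except Minimum and Maximum.
   */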
  public static KeyValue.Type generateKeyType(Random rand) {
    if (rand.nextBoolean()) {
      // Make half of the KVs Puts.
      return KeyValue.Type.Put;
    } else {
      KeyValue.Type keyType =
          KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];
      if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum) {
        throw new RuntimeException("Generated an invalid key type: " + keyType
            + ". Probably the layout of KeyValue.Type has changed.");
      }
      return keyType;
    }
  }

}