/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.*;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

@RunWith(Parameterized.class)
@Category(SmallTests.class)
public class TestFixedFileTrailer {

  private static final Log LOG = LogFactory.getLog(TestFixedFileTrailer.class);
  private static final int MAX_COMPARATOR_NAME_LENGTH = 128;

  /**
   * The number of fields used by each version, indexed by (version - 2).
   * The minimum version we support is V2.
   */
  private static final int[] NUM_FIELDS_BY_VERSION = new int[] { 14, 15 };

  private HBaseTestingUtility util = new HBaseTestingUtility();
  private FileSystem fs;
  private ByteArrayOutputStream baos = new ByteArrayOutputStream();
  private int version;

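  // Make sure NUM_FIELDS_BY_VERSION has one entry per supported format version.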
  static {
    assert NUM_FIELDS_BY_VERSION.length == HFile.MAX_FORMAT_VERSION
        - HFile.MIN_FORMAT_VERSION + 1;
  }

  public TestFixedFileTrailer(int version) {
    this.version = version;
  }

  @Parameters
  public static Collection<Object[]> getParameters() {
    List<Object[]> versionsToTest = new ArrayList<Object[]>();
    for (int v = HFile.MIN_FORMAT_VERSION; v <= HFile.MAX_FORMAT_VERSION; ++v) {
      versionsToTest.add(new Integer[] { v });
    }
    return versionsToTest;
  }

  @Before
  public void setUp() throws IOException {
    fs = FileSystem.get(util.getConfiguration());
  }

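  /**
   * Round-trips a fully populated trailer through serialize/deserialize, verifies
   * that out-of-range version bytes are rejected, then writes the trailer to a
   * file, reads it back with version auto-detection, and checks the number of
   * fields reported by toString().
   */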
  @Test
  public void testTrailer() throws IOException {
    FixedFileTrailer t = new FixedFileTrailer(version,
        HFileReaderV2.PBUF_TRAILER_MINOR_VERSION);
    t.setDataIndexCount(3);
    t.setEntryCount(((long) Integer.MAX_VALUE) + 1);

    t.setLastDataBlockOffset(291);
    t.setNumDataIndexLevels(3);
    t.setComparatorClass(KeyValue.COMPARATOR.getClass());
    t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
    t.setUncompressedDataIndexSize(827398717L); // Something random.

    t.setLoadOnOpenOffset(128);
    t.setMetaIndexCount(7);

    t.setTotalUncompressedBytes(129731987);

    {
      DataOutputStream dos = new DataOutputStream(baos); // Limited scope.
      t.serialize(dos);
      dos.flush();
      assertEquals(dos.size(), FixedFileTrailer.getTrailerSize(version));
    }

    byte[] bytes = baos.toByteArray();
    baos.reset();

    assertEquals(bytes.length, FixedFileTrailer.getTrailerSize(version));

    ByteArrayInputStream bais = new ByteArrayInputStream(bytes);

    // Finished writing, trying to read.
    {
      DataInputStream dis = new DataInputStream(bais);
      FixedFileTrailer t2 = new FixedFileTrailer(version,
          HFileReaderV2.PBUF_TRAILER_MINOR_VERSION);
      t2.deserialize(dis);
      assertEquals(-1, bais.read()); // Ensure we have read everything.
      checkLoadedTrailer(version, t, t2);
    }

    // Now check what happens if the trailer is corrupted.
    Path trailerPath = new Path(util.getDataTestDir(), "trailer_" + version);

    {
      for (byte invalidVersion : new byte[] { HFile.MIN_FORMAT_VERSION - 1,
          HFile.MAX_FORMAT_VERSION + 1 }) {
        bytes[bytes.length - 1] = invalidVersion;
        writeTrailer(trailerPath, null, bytes);
        try {
          readTrailer(trailerPath);
          fail("Exception expected");
        } catch (IllegalArgumentException ex) {
          // Make it easy to debug this.
          String msg = ex.getMessage();
          String cleanMsg = msg.replaceAll(
              "^(java(\\.[a-zA-Z]+)+:\\s+)?|\\s+\\(.*\\)\\s*$", "");
          assertEquals("Actual exception message is \"" + msg + "\".\n" +
              "Cleaned-up message", // will be followed by " expected: ..."
              "Invalid HFile version: " + invalidVersion, cleanMsg);
          LOG.info("Got an expected exception: " + msg);
        }
      }
    }

    // Now write the trailer into a file and auto-detect the version.
    writeTrailer(trailerPath, t, null);

    FixedFileTrailer t4 = readTrailer(trailerPath);

    checkLoadedTrailer(version, t, t4);

    String trailerStr = t.toString();
    assertEquals("Invalid number of fields in the string representation "
        + "of the trailer: " + trailerStr, NUM_FIELDS_BY_VERSION[version - 2],
        trailerStr.split(", ").length);
    assertEquals(trailerStr, t4.toString());
  }

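  /**
   * For version 2 only: serializes the trailer in the pre-protobuf (Writable)
   * layout and makes sure it can still be deserialized, to preserve compatibility
   * with older files.
   */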
  @Test
  public void testTrailerForV2NonPBCompatibility() throws Exception {
    if (version == 2) {
      FixedFileTrailer t = new FixedFileTrailer(version,
          HFileReaderV2.MINOR_VERSION_NO_CHECKSUM);
      t.setDataIndexCount(3);
      t.setEntryCount(((long) Integer.MAX_VALUE) + 1);
      t.setLastDataBlockOffset(291);
      t.setNumDataIndexLevels(3);
      t.setComparatorClass(KeyValue.COMPARATOR.getClass());
      t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
      t.setUncompressedDataIndexSize(827398717L); // Something random.
      t.setLoadOnOpenOffset(128);
      t.setMetaIndexCount(7);
      t.setTotalUncompressedBytes(129731987);

      {
        DataOutputStream dos = new DataOutputStream(baos); // Limited scope.
        serializeAsWritable(dos, t);
        dos.flush();
        assertEquals(FixedFileTrailer.getTrailerSize(version), dos.size());
      }

      byte[] bytes = baos.toByteArray();
      baos.reset();
      assertEquals(bytes.length, FixedFileTrailer.getTrailerSize(version));

      ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
      {
        DataInputStream dis = new DataInputStream(bais);
        FixedFileTrailer t2 = new FixedFileTrailer(version,
            HFileReaderV2.MINOR_VERSION_NO_CHECKSUM);
        t2.deserialize(dis);
        assertEquals(-1, bais.read()); // Ensure we have read everything.
        checkLoadedTrailer(version, t, t2);
      }
    }
  }

  // Copied from FixedFileTrailer to test the read path for trailers
  // serialized in the pre-protobuf (Writable) format.
  private void serializeAsWritable(DataOutputStream output, FixedFileTrailer fft)
      throws IOException {
    BlockType.TRAILER.write(output);
    output.writeLong(fft.getFileInfoOffset());
    output.writeLong(fft.getLoadOnOpenDataOffset());
    output.writeInt(fft.getDataIndexCount());
    output.writeLong(fft.getUncompressedDataIndexSize());
    output.writeInt(fft.getMetaIndexCount());
    output.writeLong(fft.getTotalUncompressedBytes());
    output.writeLong(fft.getEntryCount());
    output.writeInt(fft.getCompressionCodec().ordinal());
    output.writeInt(fft.getNumDataIndexLevels());
    output.writeLong(fft.getFirstDataBlockOffset());
    output.writeLong(fft.getLastDataBlockOffset());
    Bytes.writeStringFixedSize(output, fft.getComparatorClassName(),
        MAX_COMPARATOR_NAME_LENGTH);
    output.writeInt(FixedFileTrailer.materializeVersion(fft.getMajorVersion(),
        fft.getMinorVersion()));
  }

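  /** Reads a trailer back from the given file, using the file length to locate it. */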
  private FixedFileTrailer readTrailer(Path trailerPath) throws IOException {
    FSDataInputStream fsdis = fs.open(trailerPath);
    FixedFileTrailer trailerRead = FixedFileTrailer.readFromStream(fsdis,
        fs.getFileStatus(trailerPath).getLen());
    fsdis.close();
    return trailerRead;
  }

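  /**
   * Writes either the serialized trailer or the given raw bytes (exactly one of
   * the two must be non-null), preceded by a junk byte so that the trailer does
   * not start at offset zero.
   */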
  private void writeTrailer(Path trailerPath, FixedFileTrailer t,
      byte[] useBytesInstead) throws IOException {
    assert (t == null) != (useBytesInstead == null); // Expect one non-null.

    FSDataOutputStream fsdos = fs.create(trailerPath);
    fsdos.write(135); // to make deserializer's job less trivial
    if (useBytesInstead != null) {
      fsdos.write(useBytesInstead);
    } else {
      t.serialize(fsdos);
    }
    fsdos.close();
  }

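  /** Compares a deserialized trailer against the original, field by field. */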
  private void checkLoadedTrailer(int version, FixedFileTrailer expected,
      FixedFileTrailer loaded) throws IOException {
    assertEquals(version, loaded.getMajorVersion());
    assertEquals(expected.getDataIndexCount(), loaded.getDataIndexCount());

    assertEquals(Math.min(expected.getEntryCount(),
        version == 1 ? Integer.MAX_VALUE : Long.MAX_VALUE),
        loaded.getEntryCount());

    if (version == 1) {
      assertEquals(expected.getFileInfoOffset(), loaded.getFileInfoOffset());
    }

    if (version == 2) {
      assertEquals(expected.getLastDataBlockOffset(),
          loaded.getLastDataBlockOffset());
      assertEquals(expected.getNumDataIndexLevels(),
          loaded.getNumDataIndexLevels());
      assertEquals(expected.createComparator().getClass().getName(),
          loaded.createComparator().getClass().getName());
      assertEquals(expected.getFirstDataBlockOffset(),
          loaded.getFirstDataBlockOffset());
      assertTrue(
          expected.createComparator() instanceof KeyValue.KVComparator);
      assertEquals(expected.getUncompressedDataIndexSize(),
          loaded.getUncompressedDataIndexSize());
    }

    assertEquals(expected.getLoadOnOpenDataOffset(),
        loaded.getLoadOnOpenDataOffset());
    assertEquals(expected.getMetaIndexCount(), loaded.getMetaIndexCount());

    assertEquals(expected.getTotalUncompressedBytes(),
        loaded.getTotalUncompressedBytes());
  }

}