/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
18 package org.apache.hadoop.hbase.regionserver;
19
20 import java.util.Arrays;
21
22 import org.apache.commons.logging.Log;
23 import org.apache.commons.logging.LogFactory;
24
25
26
27
28
29
30
31
32 public class KeyPrefixRegionSplitPolicy extends IncreasingToUpperBoundRegionSplitPolicy {
33 private static final Log LOG = LogFactory
34 .getLog(KeyPrefixRegionSplitPolicy.class);
35 @Deprecated
36 public static final String PREFIX_LENGTH_KEY_DEPRECATED = "prefix_split_key_policy.prefix_length";
37 public static final String PREFIX_LENGTH_KEY = "KeyPrefixRegionSplitPolicy.prefix_length";
38
39 private int prefixLength = 0;
40
41 @Override
42 protected void configureForRegion(HRegion region) {
43 super.configureForRegion(region);
44 if (region != null) {
45 prefixLength = 0;
46
47
48 String prefixLengthString = region.getTableDesc().getValue(
49 PREFIX_LENGTH_KEY);
50 if (prefixLengthString == null) {
51
52 prefixLengthString = region.getTableDesc().getValue(PREFIX_LENGTH_KEY_DEPRECATED);
53 if (prefixLengthString == null) {
54 LOG.error(PREFIX_LENGTH_KEY + " not specified for table "
55 + region.getTableDesc().getNameAsString()
56 + ". Using default RegionSplitPolicy");
57 return;
58 }
59 }
60 try {
61 prefixLength = Integer.parseInt(prefixLengthString);
62 } catch (NumberFormatException nfe) {
63
64 }
65 if (prefixLength <= 0) {
66 LOG.error("Invalid value for " + PREFIX_LENGTH_KEY + " for table "
67 + region.getTableDesc().getNameAsString() + ":"
68 + prefixLengthString + ". Using default RegionSplitPolicy");
69 }
70 }
71 }
72
73 @Override
74 protected byte[] getSplitPoint() {
75 byte[] splitPoint = super.getSplitPoint();
76 if (prefixLength > 0 && splitPoint != null && splitPoint.length > 0) {
77
78 return Arrays.copyOf(splitPoint,
79 Math.min(prefixLength, splitPoint.length));
80 } else {
81 return splitPoint;
82 }
83 }
84 }