package org.apache.hadoop.hbase;

import java.util.Map.Entry;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.util.VersionInfo;

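/**
 * Adds HBase configuration files (hbase-default.xml and hbase-site.xml) to a
 * {@link Configuration}. Obtain instances via {@link #create()}:
 * <pre>
 *   Configuration conf = HBaseConfiguration.create();
 * </pre>
 */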
public class HBaseConfiguration extends Configuration {

  private static final Log LOG = LogFactory.getLog(HBaseConfiguration.class);

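  /** Scale factor for expressing the memstore and block cache fractions as whole percentages. */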
  private static final int CONVERT_TO_PERCENTAGE = 100;

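  /**
   * Instantiating HBaseConfiguration() is deprecated. Please use
   * HBaseConfiguration#create() to construct a plain Configuration instead.
   */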
  @Deprecated
  public HBaseConfiguration() {
    super();
    addHbaseResources(this);
    LOG.warn("instantiating HBaseConfiguration() is deprecated. Please use" +
        " HBaseConfiguration#create() to construct a plain Configuration");
  }

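  /**
   * Instantiating HBaseConfiguration() is deprecated. Please use
   * HBaseConfiguration#create(conf) to construct a plain Configuration instead.
   */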
  @Deprecated
  public HBaseConfiguration(final Configuration c) {
    this();
    merge(this, c);
  }

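  /**
   * Checks that the hbase-default.xml on the classpath was built for this version
   * of HBase, unless hbase.defaults.for.version.skip is set to true.
   */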
  private static void checkDefaultsVersion(Configuration conf) {
    if (conf.getBoolean("hbase.defaults.for.version.skip", Boolean.FALSE)) return;
    String defaultsVersion = conf.get("hbase.defaults.for.version");
    String thisVersion = VersionInfo.getVersion();
    if (!thisVersion.equals(defaultsVersion)) {
      throw new RuntimeException(
        "hbase-default.xml file seems to be for an old version of HBase (" +
        defaultsVersion + "), this version is " + thisVersion);
    }
  }

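  /**
   * Checks that the configured memstore upper limit and block cache size together
   * leave at least HConstants.HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD of the heap
   * free for other cluster operations.
   */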
  private static void checkForClusterFreeMemoryLimit(Configuration conf) {
    float globalMemstoreLimit = conf.getFloat("hbase.regionserver.global.memstore.upperLimit", 0.4f);
    int gml = (int)(globalMemstoreLimit * CONVERT_TO_PERCENTAGE);
    float blockCacheUpperLimit =
      conf.getFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY,
        HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT);
    int bcul = (int)(blockCacheUpperLimit * CONVERT_TO_PERCENTAGE);
    if (CONVERT_TO_PERCENTAGE - (gml + bcul)
        < (int)(CONVERT_TO_PERCENTAGE *
            HConstants.HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD)) {
      throw new RuntimeException(
        "Current heap configuration for MemStore and BlockCache exceeds " +
        "the threshold required for successful cluster operation. " +
        "The combined value cannot exceed 0.8. Please check " +
        "the settings for hbase.regionserver.global.memstore.upperLimit and " +
        "hfile.block.cache.size in your configuration. " +
        "hbase.regionserver.global.memstore.upperLimit is " +
        globalMemstoreLimit +
        " hfile.block.cache.size is " + blockCacheUpperLimit);
    }
  }

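  /**
   * Adds hbase-default.xml and hbase-site.xml as resources to the passed
   * Configuration and sanity-checks the result.
   *
   * @param conf configuration to add the HBase resources to
   * @return the same Configuration instance, for chaining
   */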
  public static Configuration addHbaseResources(Configuration conf) {
    conf.addResource("hbase-default.xml");
    conf.addResource("hbase-site.xml");

    checkDefaultsVersion(conf);
    checkForClusterFreeMemoryLimit(conf);
    return conf;
  }

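  /**
   * Creates a Configuration with HBase resources.
   *
   * @return a Configuration with HBase resources
   */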
  public static Configuration create() {
    Configuration conf = new Configuration();
    return addHbaseResources(conf);
  }

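  /**
   * Creates a Configuration with HBase resources and then merges in all
   * properties from the supplied configuration.
   *
   * @param that configuration to merge in; its properties override those set by
   *        the HBase resources
   * @return a Configuration with HBase resources and <code>that</code> merged in
   */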
  public static Configuration create(final Configuration that) {
    Configuration conf = create();
    merge(conf, that);
    return conf;
  }

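  /**
   * Copies all properties from one configuration into another; properties already
   * present in <code>destConf</code> are overwritten by those in <code>srcConf</code>.
   *
   * @param destConf configuration to merge into
   * @param srcConf configuration to copy properties from
   */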
  public static void merge(Configuration destConf, Configuration srcConf) {
    for (Entry<String, String> e : srcConf) {
      destConf.set(e.getKey(), e.getValue());
    }
  }

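  /**
   * @return true if the org.apache.hadoop.conf.ConfServlet class can be loaded,
   *         i.e. the running Hadoop supports showing the configuration in a servlet
   */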
  public static boolean isShowConfInServlet() {
    boolean isShowConf = false;
    try {
      if (Class.forName("org.apache.hadoop.conf.ConfServlet") != null) {
        isShowConf = true;
      }
    } catch (Exception e) {
      // ConfServlet is not on the classpath; leave isShowConf as false.
    }
    return isShowConf;
  }

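  /**
   * Dumps the merged HBase configuration as XML to standard output.
   */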
  public static void main(String[] args) throws Exception {
    HBaseConfiguration.create().writeXml(System.out);
  }
}