package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.ServerCallable;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import com.google.common.collect.Lists;

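/**
 * Tests bulk loading of HFiles into a running region server while scanners
 * run concurrently, verifying that a scan never observes a row whose column
 * families hold values from different bulk-load iterations.
 */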
@Category(LargeTests.class)
public class TestHRegionServerBulkLoad {
  final static Log LOG = LogFactory.getLog(TestHRegionServerBulkLoad.class);
  private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
  private final static Configuration conf = UTIL.getConfiguration();
  private final static byte[] QUAL = Bytes.toBytes("qual");
  private final static int NUM_CFS = 10;
  public static int BLOCKSIZE = 64 * 1024;
  public static String COMPRESSION = Compression.Algorithm.NONE.getName();

  private final static byte[][] families = new byte[NUM_CFS][];
  static {
    for (int i = 0; i < NUM_CFS; i++) {
      families[i] = Bytes.toBytes(family(i));
    }
  }

  static byte[] rowkey(int i) {
    return Bytes.toBytes(String.format("row_%08d", i));
  }

  static String family(int i) {
    return String.format("family_%04d", i);
  }

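  /**
   * Create an HFile at {@code path} containing {@code numRows} rows, each
   * holding a single KeyValue under the given family/qualifier with the
   * supplied value.
   */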
  public static void createHFile(FileSystem fs, Path path, byte[] family,
      byte[] qualifier, byte[] value, int numRows) throws IOException {
    HFile.Writer writer = HFile
        .getWriterFactory(conf, new CacheConfig(conf))
        .withPath(fs, path)
        .withBlockSize(BLOCKSIZE)
        .withCompression(COMPRESSION)
        .withComparator(KeyValue.KEY_COMPARATOR)
        .create();
    long now = System.currentTimeMillis();
    try {
      for (int i = 0; i < numRows; i++) {
        KeyValue kv = new KeyValue(rowkey(i), family, qualifier, now, value);
        writer.append(kv);
      }
    } finally {
      writer.close();
    }
  }

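  /**
   * Thread that repeatedly bulk loads a fresh HFile into every column family
   * of the table, with all families carrying the same value for a given
   * iteration. Every tenth load it also requests a region compaction.
   */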
  public static class AtomicHFileLoader extends RepeatingTestThread {
    final AtomicLong numBulkLoads = new AtomicLong();
    final AtomicLong numCompactions = new AtomicLong();
    private String tableName;

    public AtomicHFileLoader(String tableName, TestContext ctx,
        byte targetFamilies[][]) throws IOException {
      super(ctx);
      this.tableName = tableName;
    }

    public void doAnAction() throws Exception {
      long iteration = numBulkLoads.getAndIncrement();
      Path dir = UTIL.getDataTestDir(String.format("bulkLoad_%08d",
          iteration));

      // Create one HFile per column family; every family gets the same value
      // so a scanner can detect a partially applied load.
      FileSystem fs = UTIL.getTestFileSystem();
      byte[] val = Bytes.toBytes(String.format("%010d", iteration));
      final List<Pair<byte[], String>> famPaths = new ArrayList<Pair<byte[], String>>(
          NUM_CFS);
      for (int i = 0; i < NUM_CFS; i++) {
        Path hfile = new Path(dir, family(i));
        byte[] fam = Bytes.toBytes(family(i));
        createHFile(fs, hfile, fam, QUAL, val, 1000);
        famPaths.add(new Pair<byte[], String>(fam, hfile.toString()));
      }

      // Bulk load the HFiles into the region holding row "aaa".
      HConnection conn = UTIL.getHBaseAdmin().getConnection();
      byte[] tbl = Bytes.toBytes(tableName);
      new ServerCallable<Void>(conn, tbl, Bytes.toBytes("aaa")) {
        @Override
        public Void call() throws Exception {
          LOG.debug("Going to connect to server " + location + " for row "
              + Bytes.toStringBinary(row));
          byte[] regionName = location.getRegionInfo().getRegionName();
          server.bulkLoadHFiles(famPaths, regionName, true);
          return null;
        }
      }.withRetries();

      // Periodically request a compaction to keep the number of store files
      // (and open file handles) from growing without bound.
      if (numBulkLoads.get() % 10 == 0) {
        new ServerCallable<Void>(conn, tbl, Bytes.toBytes("aaa")) {
          @Override
          public Void call() throws Exception {
            LOG.debug("compacting " + location + " for row "
                + Bytes.toStringBinary(row));
            server.compactRegion(location.getRegionInfo(), true);
            numCompactions.incrementAndGet();
            return null;
          }
        }.withRetries();
      }
    }
  }

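  /**
   * Thread that repeatedly does full scans of the table and fails if it ever
   * sees two column families of the same row holding different values, which
   * would mean a bulk load was observed while only partially applied.
   */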
  public static class AtomicScanReader extends RepeatingTestThread {
    byte targetFamilies[][];
    HTable table;
    AtomicLong numScans = new AtomicLong();
    AtomicLong numRowsScanned = new AtomicLong();
    String TABLE_NAME;

    public AtomicScanReader(String TABLE_NAME, TestContext ctx,
        byte targetFamilies[][]) throws IOException {
      super(ctx);
      this.TABLE_NAME = TABLE_NAME;
      this.targetFamilies = targetFamilies;
      table = new HTable(conf, TABLE_NAME);
    }

    public void doAnAction() throws Exception {
      Scan s = new Scan();
      for (byte[] family : targetFamilies) {
        s.addFamily(family);
      }
      ResultScanner scanner = table.getScanner(s);

      for (Result res : scanner) {
        byte[] lastRow = null, lastFam = null, lastQual = null;
        byte[] gotValue = null;
        for (byte[] family : targetFamilies) {
          byte qualifier[] = QUAL;
          byte thisValue[] = res.getValue(family, qualifier);
          if (gotValue != null && thisValue != null
              && !Bytes.equals(gotValue, thisValue)) {
            StringBuilder msg = new StringBuilder();
            msg.append("Failed on scan ").append(numScans)
                .append(" after scanning ").append(numRowsScanned)
                .append(" rows!\n");
            msg.append("Current was " + Bytes.toString(res.getRow()) + "/"
                + Bytes.toString(family) + ":" + Bytes.toString(qualifier)
                + " = " + Bytes.toString(thisValue) + "\n");
            msg.append("Previous was " + Bytes.toString(lastRow) + "/"
                + Bytes.toString(lastFam) + ":" + Bytes.toString(lastQual)
                + " = " + Bytes.toString(gotValue));
            throw new RuntimeException(msg.toString());
          }

          lastFam = family;
          lastQual = qualifier;
          lastRow = res.getRow();
          gotValue = thisValue;
        }
        numRowsScanned.getAndIncrement();
      }
      numScans.getAndIncrement();
    }
  }

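  /**
   * Create the test table with {@code cfs} column families, ignoring the
   * exception if it already exists.
   */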
  private void setupTable(String table, int cfs) throws IOException {
    try {
      LOG.info("Creating table " + table);
      HTableDescriptor htd = new HTableDescriptor(table);
      for (int i = 0; i < cfs; i++) {
        htd.addFamily(new HColumnDescriptor(family(i)));
      }

      UTIL.getHBaseAdmin().createTable(htd);
    } catch (TableExistsException tee) {
      LOG.info("Table " + table + " already exists");
    }
  }

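  /**
   * Atomic bulk load test: runs a single bulk loader against 50 concurrent
   * scanners on a one-node mini cluster for 30 seconds.
   */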
  @Test
  public void testAtomicBulkLoad() throws Exception {
    String TABLE_NAME = "atomicBulkLoad";

    int millisToRun = 30000;
    int numScanners = 50;

    UTIL.startMiniCluster(1);
    try {
      runAtomicBulkloadTest(TABLE_NAME, millisToRun, numScanners);
    } finally {
      UTIL.shutdownMiniCluster();
    }
  }

  void runAtomicBulkloadTest(String tableName, int millisToRun, int numScanners)
      throws Exception {
    setupTable(tableName, 10);

    TestContext ctx = new TestContext(UTIL.getConfiguration());

    AtomicHFileLoader loader = new AtomicHFileLoader(tableName, ctx, null);
    ctx.addThread(loader);

    List<AtomicScanReader> scanners = Lists.newArrayList();
    for (int i = 0; i < numScanners; i++) {
      AtomicScanReader scanner = new AtomicScanReader(tableName, ctx, families);
      scanners.add(scanner);
      ctx.addThread(scanner);
    }

    ctx.startThreads();
    ctx.waitFor(millisToRun);
    ctx.stop();

    LOG.info("Loaders:");
    LOG.info(" loaded " + loader.numBulkLoads.get());
    LOG.info(" compactions " + loader.numCompactions.get());

    LOG.info("Scanners:");
    for (AtomicScanReader scanner : scanners) {
      LOG.info(" scanned " + scanner.numScans.get());
      LOG.info(" verified " + scanner.numRowsScanned.get() + " rows");
    }
  }

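  /**
   * Command-line entry point so the load/scan loop can be run for a longer
   * period (five minutes) outside of the JUnit harness.
   */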
  public static void main(String args[]) throws Exception {
    try {
      Configuration c = HBaseConfiguration.create();
      TestHRegionServerBulkLoad test = new TestHRegionServerBulkLoad();
      test.setConf(c);
      test.runAtomicBulkloadTest("atomicTableTest", 5 * 60 * 1000, 50);
    } finally {
      System.exit(0);
    }
  }

  private void setConf(Configuration c) {
    UTIL = new HBaseTestingUtility(c);
  }

  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
      new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}