 import java.io.File;
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.RegionMetrics;
 import org.apache.hadoop.hbase.ServerMetrics;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
+import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.TestServerCustomProtocol;
 import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassLoaderTestHelper;
 import org.apache.hadoop.hbase.util.CoprocessorClassLoader;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -167,14 +172,15 @@ public void testClassLoadingFromHDFS() throws Exception {
     LOG.info("Copied jar file to HDFS: " + jarFileOnHDFS2);

     // create a table that references the coprocessors
-    HTableDescriptor htd = new HTableDescriptor(tableName);
-    htd.addFamily(new HColumnDescriptor("test"));
-    // without configuration values
-    htd.setValue("COPROCESSOR$1", jarFileOnHDFS1.toString() + "|" + cpName1 +
-      "|" + Coprocessor.PRIORITY_USER);
-    // with configuration values
-    htd.setValue("COPROCESSOR$2", jarFileOnHDFS2.toString() + "|" + cpName2 +
-      "|" + Coprocessor.PRIORITY_USER + "|k1=v1,k2=v2,k3=v3");
+    TableDescriptorBuilder tdb = TableDescriptorBuilder.newBuilder(tableName);
+    tdb.setColumnFamily(ColumnFamilyDescriptorBuilder
+      .newBuilder(Bytes.toBytes("test")).build());
+    // without configuration values
+    tdb.setValue("COPROCESSOR$1", jarFileOnHDFS1 + "|" + cpName1
+      + "|" + Coprocessor.PRIORITY_USER);
+    // with configuration values
+    tdb.setValue("COPROCESSOR$2", jarFileOnHDFS2 + "|" + cpName2
+      + "|" + Coprocessor.PRIORITY_USER + "|k1=v1,k2=v2,k3=v3");
     Admin admin = TEST_UTIL.getAdmin();
     if (admin.tableExists(tableName)) {
       if (admin.isTableEnabled(tableName)) {
@@ -185,8 +191,9 @@ public void testClassLoadingFromHDFS() throws Exception {
     CoprocessorClassLoader.clearCache();
     byte[] startKey = {10, 63};
     byte[] endKey = {12, 43};
-    admin.createTable(htd, startKey, endKey, 4);
-    waitForTable(htd.getTableName());
+    TableDescriptor tableDescriptor = tdb.build();
+    admin.createTable(tableDescriptor, startKey, endKey, 4);
+    waitForTable(tableDescriptor.getTableName());

     // verify that the coprocessors were loaded
     boolean foundTableRegion = false;
@@ -253,13 +260,15 @@ public void testClassLoadingFromLocalFS() throws Exception {
     File jarFile = buildCoprocessorJar(cpName3);

     // create a table that references the jar
-    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(cpName3));
-    htd.addFamily(new HColumnDescriptor("test"));
-    htd.setValue("COPROCESSOR$1", getLocalPath(jarFile) + "|" + cpName3 + "|" +
+    TableDescriptorBuilder tdb = TableDescriptorBuilder.newBuilder(TableName.valueOf(cpName3));
+    tdb.setColumnFamily(ColumnFamilyDescriptorBuilder
+      .newBuilder(Bytes.toBytes("test")).build());
+    tdb.setValue("COPROCESSOR$1", getLocalPath(jarFile) + "|" + cpName3 + "|" +
       Coprocessor.PRIORITY_USER);
+    TableDescriptor tableDescriptor = tdb.build();
     Admin admin = TEST_UTIL.getAdmin();
-    admin.createTable(htd);
-    waitForTable(htd.getTableName());
+    admin.createTable(tableDescriptor);
+    waitForTable(tableDescriptor.getTableName());

     // verify that the coprocessor was loaded
     boolean found = false;
@@ -278,13 +287,15 @@ public void testPrivateClassLoader() throws Exception {
     File jarFile = buildCoprocessorJar(cpName4);

     // create a table that references the jar
-    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(cpName4));
-    htd.addFamily(new HColumnDescriptor("test"));
-    htd.setValue("COPROCESSOR$1", getLocalPath(jarFile) + "|" + cpName4 + "|" +
+    TableDescriptorBuilder tdb = TableDescriptorBuilder.newBuilder(TableName.valueOf(cpName4));
+    tdb.setColumnFamily(ColumnFamilyDescriptorBuilder
+      .newBuilder(Bytes.toBytes("test")).build());
+    tdb.setValue("COPROCESSOR$1", getLocalPath(jarFile) + "|" + cpName4 + "|" +
       Coprocessor.PRIORITY_USER);
+    TableDescriptor tableDescriptor = tdb.build();
     Admin admin = TEST_UTIL.getAdmin();
-    admin.createTable(htd);
-    waitForTable(htd.getTableName());
+    admin.createTable(tableDescriptor);
+    waitForTable(tableDescriptor.getTableName());

     // verify that the coprocessor was loaded correctly
     boolean found = false;
@@ -325,23 +336,35 @@ public void testHBase3810() throws Exception {
       " | org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver | | k=v ";

     // create a table that references the jar
-    HTableDescriptor htd = new HTableDescriptor(tableName);
-    htd.addFamily(new HColumnDescriptor("test"));
+    TableDescriptorBuilder tdb = TableDescriptorBuilder.newBuilder(tableName);
+    tdb.setColumnFamily(ColumnFamilyDescriptorBuilder
+      .newBuilder(Bytes.toBytes("test")).build());

     // add 3 coprocessors by setting htd attributes directly.
-    htd.setValue(cpKey1, cpValue1);
-    htd.setValue(cpKey2, cpValue2);
-    htd.setValue(cpKey3, cpValue3);
+    tdb.setValue(cpKey1, cpValue1);
+    tdb.setValue(cpKey2, cpValue2);
+    tdb.setValue(cpKey3, cpValue3);

     // add 2 coprocessor by using new htd.setCoprocessor() api
-    htd.addCoprocessor(cpName5, new Path(getLocalPath(jarFile5)),
-      Coprocessor.PRIORITY_USER, null);
+    CoprocessorDescriptor coprocessorDescriptor = CoprocessorDescriptorBuilder
+      .newBuilder(cpName5)
+      .setJarPath(new Path(getLocalPath(jarFile5)).toString())
+      .setPriority(Coprocessor.PRIORITY_USER)
+      .setProperties(Collections.emptyMap())
+      .build();
+    tdb.setCoprocessor(coprocessorDescriptor);
     Map<String, String> kvs = new HashMap<>();
     kvs.put("k1", "v1");
     kvs.put("k2", "v2");
     kvs.put("k3", "v3");
-    htd.addCoprocessor(cpName6, new Path(getLocalPath(jarFile6)),
-      Coprocessor.PRIORITY_USER, kvs);
+
+    coprocessorDescriptor = CoprocessorDescriptorBuilder
+      .newBuilder(cpName6)
+      .setJarPath(new Path(getLocalPath(jarFile6)).toString())
+      .setPriority(Coprocessor.PRIORITY_USER)
+      .setProperties(kvs)
+      .build();
+    tdb.setCoprocessor(coprocessorDescriptor);

     Admin admin = TEST_UTIL.getAdmin();
     if (admin.tableExists(tableName)) {
@@ -350,8 +373,10 @@ public void testHBase3810() throws Exception {
       }
       admin.deleteTable(tableName);
     }
-    admin.createTable(htd);
-    waitForTable(htd.getTableName());
+
+    TableDescriptor tableDescriptor = tdb.build();
+    admin.createTable(tableDescriptor);
+    waitForTable(tableDescriptor.getTableName());

     // verify that the coprocessor was loaded
     boolean found_2 = false, found_1 = false, found_3 = false,
@@ -426,23 +451,26 @@ void loadingClassFromLibDirInJar(String libPrefix) throws Exception {
     LOG.info("Copied jar file to HDFS: " + jarFileOnHDFS);

     // create a table that references the coprocessors
-    HTableDescriptor htd = new HTableDescriptor(tableName);
-    htd.addFamily(new HColumnDescriptor("test"));
+    TableDescriptorBuilder tdb = TableDescriptorBuilder.newBuilder(tableName);
+    tdb.setColumnFamily(ColumnFamilyDescriptorBuilder
+      .newBuilder(Bytes.toBytes("test")).build());
     // without configuration values
-    htd.setValue("COPROCESSOR$1", jarFileOnHDFS.toString() + "|" + cpName1 +
-      "|" + Coprocessor.PRIORITY_USER);
+    tdb.setValue("COPROCESSOR$1", jarFileOnHDFS + "|" + cpName1
+      + "|" + Coprocessor.PRIORITY_USER);
     // with configuration values
-    htd.setValue("COPROCESSOR$2", jarFileOnHDFS.toString() + "|" + cpName2 +
-      "|" + Coprocessor.PRIORITY_USER + "|k1=v1,k2=v2,k3=v3");
+    tdb.setValue("COPROCESSOR$2", jarFileOnHDFS + "|" + cpName2
+      + "|" + Coprocessor.PRIORITY_USER + "|k1=v1,k2=v2,k3=v3");
     Admin admin = TEST_UTIL.getAdmin();
     if (admin.tableExists(tableName)) {
       if (admin.isTableEnabled(tableName)) {
         admin.disableTable(tableName);
       }
       admin.deleteTable(tableName);
     }
-    admin.createTable(htd);
-    waitForTable(htd.getTableName());
+
+    TableDescriptor tableDescriptor = tdb.build();
+    admin.createTable(tableDescriptor);
+    waitForTable(tableDescriptor.getTableName());

     // verify that the coprocessors were loaded
     boolean found1 = false, found2 = false, found2_k1 = false,
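
Taken together, the hunks above replace the deprecated HTableDescriptor/HColumnDescriptor mutators with the builder-style client API (TableDescriptorBuilder, ColumnFamilyDescriptorBuilder, CoprocessorDescriptorBuilder) that this diff imports. A minimal standalone sketch of that pattern follows; the table name, column family, coprocessor class name, and jar path are illustrative placeholders rather than values from this patch, and the Admin/cluster wiring is assumed to exist elsewhere.

import java.io.IOException;
import java.util.Collections;

import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class DescriptorBuilderSketch {

  // Builds a table descriptor the way the patched test does: the column family comes from
  // ColumnFamilyDescriptorBuilder and the coprocessor from CoprocessorDescriptorBuilder.
  // The observer class name and jar path below are hypothetical placeholders.
  static TableDescriptor exampleDescriptor() throws IOException {
    CoprocessorDescriptor cpd = CoprocessorDescriptorBuilder
      .newBuilder("org.example.MyRegionObserver")          // hypothetical observer class
      .setJarPath("hdfs:///user/hbase/my-coprocessor.jar") // hypothetical jar location
      .setPriority(Coprocessor.PRIORITY_USER)
      .setProperties(Collections.singletonMap("k1", "v1"))
      .build();

    return TableDescriptorBuilder.newBuilder(TableName.valueOf("example_table"))
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("test")).build())
      .setCoprocessor(cpd)
      .build();
  }

  // Table creation itself is unchanged apart from taking a TableDescriptor.
  static void createExampleTable(Admin admin) throws IOException {
    admin.createTable(exampleDescriptor());
  }
}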