Updated and new examples, ch05.
larsgeorge committed Apr 10, 2015
1 parent 27f0e3a commit 2395114
Showing 8 changed files with 325 additions and 16 deletions.
1 change: 1 addition & 0 deletions ch04/src/main/java/coprocessor/RegionObserverExample.java
@@ -5,6 +5,7 @@

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.client.Get;
35 changes: 35 additions & 0 deletions ch05/src/main/java/admin/DelayRegionCloseObserver.java
@@ -0,0 +1,35 @@
package admin;

import java.io.IOException;
import java.util.Random;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.HRegion;

// cc DelayRegionCloseObserver Special test observer creating delays
public class DelayRegionCloseObserver extends BaseRegionObserver {
public static final Log LOG = LogFactory.getLog(HRegion.class);

// vv DelayRegionCloseObserver
private Random rnd = new Random();

@Override
public void preClose(ObserverContext<RegionCoprocessorEnvironment> c,
boolean abortRequested) throws IOException {
try {
long delay = rnd.nextInt(3);
LOG.info("@@@ Delaying region " +
c.getEnvironment().getRegion().getRegionNameAsString() +
" for " + delay + " seconds...");
Thread.sleep(delay * 1000);
} catch (InterruptedException ie) {
LOG.error(ie);
}
}
// ^^ DelayRegionCloseObserver
}
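
For context on how such an observer gets loaded: it can be attached per table through the table descriptor, which is the route ModifyTableExample takes further down in this commit. A minimal sketch, assuming the same HBase 1.0 client API and imports as the surrounding files:

// Hypothetical sketch: create a table whose regions run the
// DelayRegionCloseObserver, using the same addCoprocessor() call that
// ModifyTableExample below chains onto its descriptor.
Configuration conf = HBaseConfiguration.create();
try (Connection connection = ConnectionFactory.createConnection(conf);
     Admin admin = connection.getAdmin()) {
  HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("testtable"))
    .addFamily(new HColumnDescriptor("colfam1"))
    .addCoprocessor(DelayRegionCloseObserver.class.getCanonicalName());
  admin.createTable(desc);
}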
1 change: 0 additions & 1 deletion ch05/src/main/java/admin/ListTablesExample.java
@@ -29,7 +29,6 @@ public static void main(String[] args) throws IOException, InterruptedException
// vv ListTablesExample
Connection connection = ConnectionFactory.createConnection(conf);
Admin admin = connection.getAdmin();
TableName tableName = TableName.valueOf("testtable");

HTableDescriptor[] htds = admin.listTables();
// ^^ ListTablesExample
84 changes: 84 additions & 0 deletions ch05/src/main/java/admin/ListTablesExample2.java
@@ -0,0 +1,84 @@
package admin;

import java.io.IOException;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

import util.HBaseHelper;

// cc ListTablesExample2 Example listing the existing tables with patterns
public class ListTablesExample2 {

private static void print(HTableDescriptor[] descriptors) {
for (HTableDescriptor htd : descriptors) {
System.out.println(htd.getTableName());
}
System.out.println();
}

public static void main(String[] args)
throws IOException, InterruptedException {
Configuration conf = HBaseConfiguration.create();

HBaseHelper helper = HBaseHelper.getHelper(conf);
helper.dropNamespace("testspace1", true);
helper.dropNamespace("testspace2", true);
helper.dropTable("testtable3");
helper.createNamespace("testspace1");
helper.createNamespace("testspace2");
helper.createTable("testspace1:testtable1", "colfam1");
helper.createTable("testspace2:testtable2", "colfam1");
helper.createTable("testtable3", "colfam1");

Connection connection = ConnectionFactory.createConnection(conf);
Admin admin = connection.getAdmin();

System.out.println("List: .*");
// vv ListTablesExample2
HTableDescriptor[] htds = admin.listTables(".*");
// ^^ ListTablesExample2
print(htds);
System.out.println("List: .*, including system tables");
// vv ListTablesExample2
htds = admin.listTables(".*", true);
// ^^ ListTablesExample2
print(htds);

System.out.println("List: hbase:.*, including system tables");
// vv ListTablesExample2
htds = admin.listTables("hbase:.*", true);
// ^^ ListTablesExample2
print(htds);

System.out.println("List: def.*:.*, including system tables");
// vv ListTablesExample2
htds = admin.listTables("def.*:.*", true);
// ^^ ListTablesExample2
print(htds);

System.out.println("List: test.*");
// vv ListTablesExample2
htds = admin.listTables("test.*");
// ^^ ListTablesExample2
print(htds);

System.out.println("List: .*2, using Pattern");
// vv ListTablesExample2
Pattern pattern = Pattern.compile(".*2");
htds = admin.listTables(pattern);
// ^^ ListTablesExample2
print(htds);

System.out.println("List by Namespace: testspace1");
// vv ListTablesExample2
htds = admin.listTableDescriptorsByNamespace("testspace1");
// ^^ ListTablesExample2
print(htds);
}
}
84 changes: 84 additions & 0 deletions ch05/src/main/java/admin/ListTablesExample3.java
@@ -0,0 +1,84 @@
package admin;

import java.io.IOException;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

import util.HBaseHelper;

// cc ListTablesExample3 Example listing the existing tables with patterns
public class ListTablesExample3 {

private static void print(TableName[] tableNames) {
for (TableName name : tableNames) {
System.out.println(name);
}
System.out.println();
}

public static void main(String[] args)
throws IOException, InterruptedException {
Configuration conf = HBaseConfiguration.create();

HBaseHelper helper = HBaseHelper.getHelper(conf);
helper.dropNamespace("testspace1", true);
helper.dropNamespace("testspace2", true);
helper.dropTable("testtable3");
helper.createNamespace("testspace1");
helper.createNamespace("testspace2");
helper.createTable("testspace1:testtable1", "colfam1");
helper.createTable("testspace2:testtable2", "colfam1");
helper.createTable("testtable3", "colfam1");

Connection connection = ConnectionFactory.createConnection(conf);
Admin admin = connection.getAdmin();

System.out.println("List: .*");
// vv ListTablesExample3
TableName[] names = admin.listTableNames(".*");
// ^^ ListTablesExample3
print(names);
System.out.println("List: .*, including system tables");
// vv ListTablesExample3
names = admin.listTableNames(".*", true);
// ^^ ListTablesExample3
print(names);

System.out.println("List: hbase:.*, including system tables");
// vv ListTablesExample3
names = admin.listTableNames("hbase:.*", true);
// ^^ ListTablesExample3
print(names);

System.out.println("List: def.*:.*, including system tables");
// vv ListTablesExample3
names = admin.listTableNames("def.*:.*", true);
// ^^ ListTablesExample3
print(names);

System.out.println("List: test.*");
// vv ListTablesExample3
names = admin.listTableNames("test.*");
// ^^ ListTablesExample3
print(names);

System.out.println("List: .*2, using Pattern");
// vv ListTablesExample3
Pattern pattern = Pattern.compile(".*2");
names = admin.listTableNames(pattern);
// ^^ ListTablesExample3
print(names);

System.out.println("List by Namespace: testspace1");
// vv ListTablesExample3
names = admin.listTableNamesByNamespace("testspace1");
// ^^ ListTablesExample3
print(names);
}
}
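
The two listings differ only in their return type: ListTablesExample2 works with full HTableDescriptor instances, while ListTablesExample3 retrieves the lighter TableName handles. A minimal follow-up sketch, assuming the same Admin instance, of looking up a single listed table by name instead of by pattern:

// Hypothetical sketch: check one namespace-qualified table by name and
// fetch its full descriptor, rather than filtering with a regex.
TableName name = TableName.valueOf("testspace1", "testtable1");
if (admin.tableExists(name)) {
  HTableDescriptor htd = admin.getTableDescriptor(name);
  System.out.println("Found: " + htd.getTableName());
}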
53 changes: 39 additions & 14 deletions ch05/src/main/java/admin/ModifyTableExample.java
@@ -11,6 +11,7 @@
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

import util.HBaseHelper;

@@ -22,30 +23,54 @@ public static void main(String[] args) throws IOException, InterruptedException

HBaseHelper helper = HBaseHelper.getHelper(conf);
helper.dropTable("testtable");
// vv ModifyTableExample

Connection connection = ConnectionFactory.createConnection(conf);
// vv ModifyTableExample
Admin admin = connection.getAdmin();
TableName tableName = TableName.valueOf("testtable");

HTableDescriptor desc = new HTableDescriptor(tableName);
HColumnDescriptor coldef1 = new HColumnDescriptor(
Bytes.toBytes("colfam1"));
desc.addFamily(coldef1);
HColumnDescriptor coldef1 = new HColumnDescriptor("colfam1");
HTableDescriptor desc = new HTableDescriptor(tableName)
.addFamily(coldef1)
.addCoprocessor(DelayRegionCloseObserver.class.getCanonicalName())
.setValue("Description", "Chapter 5 - ModifyTableExample: Original Table");

admin.createTable(desc); // co ModifyTableExample-1-CreateTable Create the table with the original structure.
admin.createTable(desc, Bytes.toBytes(1L), Bytes.toBytes(10000L), 50); // co ModifyTableExample-1-CreateTable Create the table with the original structure and 50 regions.

HTableDescriptor htd1 = admin.getTableDescriptor(tableName); // co ModifyTableExample-2-SchemaUpdate Get schema, update by adding a new family and changing the maximum file size property.
HColumnDescriptor coldef2 = new HColumnDescriptor(
Bytes.toBytes("colfam2"));
htd1.addFamily(coldef2);
htd1.setMaxFileSize(1024 * 1024 * 1024L);
HTableDescriptor htd1 = admin.getTableDescriptor(tableName); // co ModifyTableExample-2-SchemaUpdate Get schema, update by adding a new family and changing the maximum file size property. We delay the region opening artificially by setting a special "test" observer.
HColumnDescriptor coldef2 = new HColumnDescriptor("colfam2");
htd1
.addFamily(coldef2)
.setMaxFileSize(1024 * 1024 * 1024L)
.setValue("Description",
"Chapter 5 - ModifyTableExample: Modified Table");

admin.disableTable(tableName);
admin.modifyTable(tableName, htd1); // co ModifyTableExample-3-ChangeTable Disable, modify, and enable the table.
admin.modifyTable(tableName, htd1); // co ModifyTableExample-3-ChangeTable Disable and modify the table.

Pair<Integer, Integer> status = new Pair<Integer, Integer>() {{ // co ModifyTableExample-4-Pair Create a status number pair to start the loop.
setFirst(50);
setSecond(50);
}};
for (int i = 0; status.getFirst() != 0 && i < 500; i++) {
status = admin.getAlterStatus(desc.getTableName()); // co ModifyTableExample-5-Loop Loop over status until all regions are updated, or 500 seconds have been exceeded.
if (status.getSecond() != 0) {
int pending = status.getSecond() - status.getFirst();
System.out.println(pending + " of " + status.getSecond()
+ " regions updated.");
Thread.sleep(1 * 1000l);
} else {
System.out.println("All regions updated.");
break;
}
}
if (status.getFirst() != 0) {
throw new IOException("Failed to update regions after 500 seconds.");
}

admin.enableTable(tableName);

HTableDescriptor htd2 = admin.getTableDescriptor(tableName);
System.out.println("Equals: " + htd1.equals(htd2)); // co ModifyTableExample-4-Verify Check if the table schema matches the new one created locally.
System.out.println("Equals: " + htd1.equals(htd2)); // co ModifyTableExample-6-Verify Check if the table schema matches the new one created locally.
System.out.println("New schema: " + htd2);
// ^^ ModifyTableExample
}
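
The Pair returned by getAlterStatus() reads as (regions yet to be updated, total regions), so the loop above is done once the first value reaches zero. A small helper sketch restating that check, assuming the same Admin API:

// Hypothetical helper: true once no region of the table is still waiting
// for the altered schema, i.e. the "yet to be updated" count is zero.
private static boolean alterFinished(Admin admin, TableName tableName)
    throws IOException {
  Pair<Integer, Integer> status = admin.getAlterStatus(tableName);
  return status.getFirst() == 0;
}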
52 changes: 52 additions & 0 deletions ch05/src/main/java/admin/ServerAndRegionNameExample.java
@@ -0,0 +1,52 @@
package admin;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.util.Bytes;

import util.HBaseHelper;

// cc ServerAndRegionNameExample Shows the use of server and region names
public class ServerAndRegionNameExample {

public static void main(String[] args) throws IOException, InterruptedException {
Configuration conf = HBaseConfiguration.create();
HBaseHelper helper = HBaseHelper.getHelper(conf);
helper.dropTable("testtable");
Connection connection = ConnectionFactory.createConnection(conf);
Admin admin = connection.getAdmin();

// vv ServerAndRegionNameExample
TableName tableName = TableName.valueOf("testtable");
HColumnDescriptor coldef1 = new HColumnDescriptor("colfam1");
HTableDescriptor desc = new HTableDescriptor(tableName)
.addFamily(coldef1)
.setValue("Description", "Chapter 5 - ServerAndRegionNameExample");
byte[][] regions = new byte[][] { Bytes.toBytes("ABC"),
Bytes.toBytes("DEF"), Bytes.toBytes("GHI"), Bytes.toBytes("KLM"),
Bytes.toBytes("OPQ"), Bytes.toBytes("TUV")
};
admin.createTable(desc, regions);

RegionLocator locator = connection.getRegionLocator(tableName);
HRegionLocation location = locator.getRegionLocation(Bytes.toBytes("Foo"));
HRegionInfo info = location.getRegionInfo();
System.out.println("Region Name: " + info.getRegionNameAsString());
System.out.println("Server Name: " + location.getServerName());

locator.close();
connection.close();
// ^^ ServerAndRegionNameExample
}
}
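
To cover all regions of the table instead of only the one hosting a single row key, RegionLocator also exposes getAllRegionLocations(). A brief sketch, assuming the same connection and tableName as above (run before the connection is closed):

// Hedged sketch: print every region of the table with its hosting server.
RegionLocator locator = connection.getRegionLocator(tableName);
for (HRegionLocation loc : locator.getAllRegionLocations()) {
  System.out.println(loc.getRegionInfo().getRegionNameAsString() +
    " -> " + loc.getServerName());
}
locator.close();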