1. Introduction
Like Hadoop, HBase is written in Java. In this lab we learn how to use Java code to operate an HBase database.
Lab environment:
hadoop-2.7
JDK 8
HBase 2.1.1
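By default `HBaseConfiguration.create()` picks up `hbase-site.xml` from the classpath, which is enough on the lab machine. If the client runs outside the cluster, the ZooKeeper quorum usually has to be set by hand. The sketch below is only an illustration; the quorum address, port, and the `ConnectionDemo` class name are assumptions that depend on your own environment.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class ConnectionDemo {
    public static void main(String[] args) throws Exception {
        // Reads hbase-site.xml from the classpath if present.
        Configuration config = HBaseConfiguration.create();
        // Assumed values: point these at your own ZooKeeper quorum.
        config.set("hbase.zookeeper.quorum", "localhost");
        config.set("hbase.zookeeper.property.clientPort", "2181");
        try (Connection connection = ConnectionFactory.createConnection(config)) {
            System.out.println("Connected: " + !connection.isClosed());
        }
    }
}
```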
2. Tasks
1. Level 1: Create a Table
```java
package step1;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
/**
 * Example client that uses the HBase 2.x {@code Connection},
 * {@code Admin} and {@code Table} APIs.
 */
public class Task {

    public void createTable() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        try {
            Admin admin = connection.getAdmin();
            try {
                // Build the "dept" table with the new (HBase 2.x) API:
                // the TableDescriptor is assembled through a TableDescriptorBuilder.
                TableName tableName = TableName.valueOf("dept");
                TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
                ColumnFamilyDescriptor family =
                        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data")).build(); // build the column family descriptor
                tableDescriptor.setColumnFamily(family);    // attach the column family
                admin.createTable(tableDescriptor.build()); // create the table

                // Build the "emp" table the same way.
                TableName emp = TableName.valueOf("emp");
                TableDescriptorBuilder empDescriptor = TableDescriptorBuilder.newBuilder(emp);
                ColumnFamilyDescriptor empFamily =
                        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("emp")).build(); // build the column family descriptor
                empDescriptor.setColumnFamily(empFamily);   // attach the column family
                admin.createTable(empDescriptor.build());   // create the table
            } finally {
                admin.close();
            }
        } finally {
            connection.close();
        }
        /********* End *********/
    }
}
```

Command line (start Hadoop first, then HBase):

```
start-dfs.sh     # start Hadoop, then press Enter
start-hbase.sh   # start HBase
```
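Once Hadoop and HBase are up, the result of Level 1 can be verified with `list` in the `hbase shell`, or with a short client program. The sketch below is only a sanity check under that assumption; the `CreateTableCheck` class is hypothetical and not part of the graded `Task` class.

```java
package step1;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

// Hypothetical helper, not part of the graded Task class.
public class CreateTableCheck {
    public static void main(String[] args) throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = connection.getAdmin()) {
            // tableExists() asks the master whether the table is known.
            System.out.println("dept exists: " + admin.tableExists(TableName.valueOf("dept")));
            System.out.println("emp  exists: " + admin.tableExists(TableName.valueOf("emp")));
        }
    }
}
```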
2. Level 2: Add Data
```java
package step2;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {

    public void insertInfo() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();

        // Create the tb_step2 table with a single "data" column family.
        TableName tableName = TableName.valueOf("tb_step2");
        TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
        ColumnFamilyDescriptor family =
                ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data")).build(); // build the column family descriptor
        tableDescriptor.setColumnFamily(family);    // attach the column family
        admin.createTable(tableDescriptor.build()); // create the table

        // Insert data.
        byte[] row1 = Bytes.toBytes("row1");
        Put put1 = new Put(row1);
        byte[] columnFamily1 = Bytes.toBytes("data");         // column family
        byte[] qualifier1 = Bytes.toBytes(String.valueOf(1)); // column qualifier
        byte[] value1 = Bytes.toBytes("张三丰");               // value
        put1.addColumn(columnFamily1, qualifier1, value1);

        byte[] row2 = Bytes.toBytes("row2");
        Put put2 = new Put(row2);
        byte[] columnFamily2 = Bytes.toBytes("data");         // column family
        byte[] qualifier2 = Bytes.toBytes(String.valueOf(2)); // column qualifier
        byte[] value2 = Bytes.toBytes("张无忌");               // value
        put2.addColumn(columnFamily2, qualifier2, value2);

        Table table = connection.getTable(tableName);
        table.put(put1);
        table.put(put2);
        /********* End *********/
    }
}
```
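Each `table.put(put)` call above is a separate round trip to the region server. When many rows are inserted, the client also accepts a `List<Put>` in a single call. The sketch below is only an illustration; it assumes the `tb_step2` table created above already exists, and the `BatchPutDemo` class name and the extra row keys are hypothetical.

```java
package step2;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical helper, not part of the graded Task class.
public class BatchPutDemo {
    public static void main(String[] args) throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = connection.getTable(TableName.valueOf("tb_step2"))) {
            List<Put> puts = new ArrayList<>();
            for (int i = 3; i <= 5; i++) {
                Put put = new Put(Bytes.toBytes("row" + i));
                put.addColumn(Bytes.toBytes("data"), Bytes.toBytes(String.valueOf(i)),
                        Bytes.toBytes("value" + i));
                puts.add(put);
            }
            table.put(puts); // one call submits all the Puts
        }
    }
}
```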
3. Level 3: Get Data
```java
package step3;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {

    public void queryTableInfo() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();

        // Single-row read from t_step3.
        TableName tableName = TableName.valueOf("t_step3");
        Table table = connection.getTable(tableName);
        Get get = new Get(Bytes.toBytes("row1")); // Get describes the row to fetch
        Result result = table.get(get);           // fetch the row through the Table
        // System.out.println("Result: " + result);
        // Often only the cell value is needed; this reads column data:1.
        byte[] valueBytes = result.getValue(Bytes.toBytes("data"), Bytes.toBytes("1")); // returned as a byte array
        // Convert the bytes to a string.
        String valueStr = new String(valueBytes, "utf-8");
        System.out.println("value:" + valueStr);

        // Full-table scan of table_step3.
        TableName tableStep3Name = TableName.valueOf("table_step3");
        Table step3Table = connection.getTable(tableStep3Name);
        Scan scan = new Scan();
        ResultScanner scanner = step3Table.getScanner(scan);
        try {
            for (Result scannerResult : scanner) {
                // byte[] value = scannerResult.getValue(Bytes.toBytes("data"), Bytes.toBytes(1));
                // System.out.println("Scan: " + scannerResult);
                byte[] row = scannerResult.getRow();
                System.out.println("rowName:" + new String(row, "utf-8"));
            }
        } finally {
            scanner.close();
        }
        /********* End *********/
    }
}
```
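A plain `new Scan()` returns every column of every row. When only one column matters, the scan can be narrowed with `Scan.addColumn` so less data crosses the wire. The sketch below is illustrative only; it assumes the `table_step3` table from this level stores its values under `data:1`, and the `ScanColumnDemo` class name is hypothetical.

```java
package step3;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical helper, not part of the graded Task class.
public class ScanColumnDemo {
    public static void main(String[] args) throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = connection.getTable(TableName.valueOf("table_step3"))) {
            Scan scan = new Scan();
            scan.addColumn(Bytes.toBytes("data"), Bytes.toBytes("1")); // only fetch data:1
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    byte[] value = result.getValue(Bytes.toBytes("data"), Bytes.toBytes("1"));
                    System.out.println(Bytes.toString(result.getRow()) + " -> "
                            + (value == null ? "<no value>" : Bytes.toString(value)));
                }
            }
        }
    }
}
```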
4. Level 4: Delete a Table
```java
package step4;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {

    public void deleteTable() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();

        TableName tableName = TableName.valueOf("t_step4");
        admin.disableTable(tableName); // a table must be disabled before it can be deleted
        admin.deleteTable(tableName);
        /********* End *********/
    }
}
```
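`deleteTable` fails if the table does not exist or is still enabled, which is why the table is disabled first. A slightly more defensive variant can check both conditions before acting. The sketch below is only an illustration; the `SafeDeleteDemo` class is hypothetical and not part of the graded `Task` class.

```java
package step4;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

// Hypothetical helper, not part of the graded Task class.
public class SafeDeleteDemo {
    public static void main(String[] args) throws Exception {
        TableName tableName = TableName.valueOf("t_step4");
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = connection.getAdmin()) {
            if (!admin.tableExists(tableName)) {
                System.out.println(tableName + " does not exist, nothing to delete");
                return;
            }
            if (admin.isTableEnabled(tableName)) {
                admin.disableTable(tableName); // deletion requires the table to be disabled
            }
            admin.deleteTable(tableName);
            System.out.println("deleted " + tableName);
        }
    }
}
```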