I. Preparation
- In C:\Windows\System32\drivers\etc\hosts, add the Linux server's IP-to-alias mapping:
➢ 192.168.221.140 chust01
- Add the pom dependencies:
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>1.2.0-cdh5.14.2</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
<version>1.2.0-cdh5.14.2</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
<version>1.2.0-cdh5.14.2</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.6.0-cdh5.14.2</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
</dependencies>
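The cdh-suffixed versions above are published in Cloudera's repository rather than Maven Central, so the pom also needs a repository entry. A minimal sketch (the id is arbitrary):
<repositories>
<repository>
<id>cloudera</id>
<url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
</repository>
</repositories>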
II. Object-Oriented Style
2. Getting the configuration
package hbase;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.*;
/**
* Unit test for simple App.
*/
public class AppTest
{
/**
* Rigorous Test :-)
*/
// @Test
// public void shouldAnswerWithTrue()
// {
// assertTrue( true );
// }
private Connection client;
private Admin admin;
//@Before: runs before every public void method annotated with @Test
@Before
public void before() throws IOException {
Configuration conf = HBaseConfiguration.create();
conf.set("hbase.zookeeper.quorum","chust01");
conf.set("hbase.zookeeper.property.clientPort","2181");
conf.set("hbase.master","192.168.221.140:16000");
// The configuration can also be loaded from hbase-site.xml and core-site.xml
// instead of setting the corresponding properties as above:
// conf.addResource(new Path("/opt/software/hadoop/hbase120/conf/hbase-site.xml"));
// conf.addResource(new Path("/opt/software/hadoop/hadoop260/etc/hadoop/core-site.xml"));
client = ConnectionFactory.createConnection(conf);
admin = client.getAdmin();
}
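Only dropTbl below closes admin and client; a hedged @After sketch that releases them after every test (requires import org.junit.After; closing twice is harmless because close() is idempotent):
@After
public void after() throws IOException {
    // Counterpart to before(): release the resources after each @Test
    if (admin != null) admin.close();
    if (client != null) client.close();
}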
3. Create a namespace - create_namespace ''
//创建命名空间
@Test
public void createNS() throws IOException {
admin.createNamespace(NamespaceDescriptor.create("jh").build());
}
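createNamespace fails with NamespaceExistException when "jh" already exists; a hedged guard using the 1.2 Admin API (the test name is made up):
@Test
public void createNSIfAbsent() throws IOException {
    for (NamespaceDescriptor nd : admin.listNamespaceDescriptors()) {
        if ("jh".equals(nd.getName())) {
            return; // namespace already exists, nothing to do
        }
    }
    admin.createNamespace(NamespaceDescriptor.create("jh").build());
}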
4. Create a table with column families - create '', ''
//创建表
@Test
public void createTbl() throws IOException, DeserializationException {
final TableName tn = TableName.valueOf("jh:student");
if(admin.tableExists(tn)){
System.out.println(tn.getNameAsString()+" exists"); // getName() returns byte[]; use the String form for printing
return;
}
HTableDescriptor htd = new HTableDescriptor(tn);
String[] cfs = {"basic","score"};
for (String cf : cfs) {
// htd.addFamily(HColumnDescriptor.parseFrom(cf.getBytes()));
htd.addFamily(new HColumnDescriptor(cf));
}
admin.createTable(htd);
}
5. Drop a table - disable ''; drop ''
//删除表
@Test
public void dropTbl() throws IOException {
final TableName tn = TableName.valueOf("jh:student");
admin.disableTable(tn);
admin.deleteTable(tn);
admin.close();
client.close();
}
6. Put data - put '', 'rowkey', 'columnFamily:column', 'value'
private Put getPut(String rowKey, Map<String,Map<String,String>> cv){
Put put = new Put(rowKey.getBytes());
for (Map.Entry<String, Map<String,String>> entry : cv.entrySet()) {
byte[] cf = entry.getKey().getBytes(); // the outer map key is the column family, not the row key
for (Map.Entry<String, String> e : entry.getValue().entrySet()) {
put.addColumn(cf,e.getKey().getBytes(),e.getValue().getBytes());
}
}
return put;
}
@Test
public void put() throws IOException {
Table table = client.getTable(TableName.valueOf("jh:student"));
Map<String,Map<String,String>> cv = new HashMap<>();
Map<String,String> basic = new HashMap<>();
basic.put("name","zhangsan");
basic.put("age","18");
cv.put("basic",basic);
Map<String,String> score = new HashMap<>();
score.put("java","88");
score.put("hadoop","76");
cv.put("score",score);
table.put(getPut("1002",cv));
table.close();
}
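table.put also accepts a List<Put>, which writes several rows in one batch; a hedged sketch reusing the getPut helper (row keys and values are illustrative):
@Test
public void putBatch() throws IOException {
    Table table = client.getTable(TableName.valueOf("jh:student"));
    List<Put> puts = new ArrayList<>();
    for (String rowKey : new String[]{"1003", "1004"}) {
        Map<String, Map<String, String>> cv = new HashMap<>();
        Map<String, String> basic = new HashMap<>();
        basic.put("name", "stu" + rowKey);
        cv.put("basic", basic);
        puts.add(getPut(rowKey, cv));
    }
    table.put(puts); // one batched call instead of one RPC per row
    table.close();
}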
7. Get data - get '', '', ''
private Get get(String rowKey,Map<String, List<String>> fc){
Get get = new Get(rowKey.getBytes());
for (Map.Entry<String, List<String>> entry : fc.entrySet()) {
byte[] cf = entry.getKey().getBytes();
for (String c : entry.getValue()) {
get.addColumn(cf,c.getBytes());
}
}
return get;
}
@Test
public void get() throws IOException {
Table table = client.getTable(TableName.valueOf("jh:student"));
Map<String, List<String>> fc = new HashMap<>();
fc.put("basic", Arrays.asList("name"));
fc.put("score", Arrays.asList("hadoop"));
show(table.get(get("1001",fc)),fc); // get(...) is the helper above, which builds the Get
table.close();
}
8. Scan data - scan ''
private void show(Result rst,Map<String, List<String>> fc){
if(null==rst||rst.isEmpty()){ // table.get returns an empty Result, not null, when the row is absent
System.out.println("no data found");
return;
}
for (Map.Entry<String, List<String>> entry : fc.entrySet()) {
final byte[] cf = entry.getKey().getBytes();
for (String c : entry.getValue()) {
final byte[] value = rst.getValue(cf, c.getBytes());
// getValue returns null for a missing column; guard against the NPE
System.out.println(null==value ? "(no value)" : new String(value));
}
}
}
@Test
public void scan() throws IOException {
Table table = client.getTable(TableName.valueOf("jh:student"));
Scan scan = new Scan();
//BinaryComparator RegexStringComparator BinaryPrefixComparator
RowFilter rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(".*")); // RegexStringComparator returns 0 on a match, so EQUAL keeps matching rows; ".*" matches every row key
scan.setFilter(rowFilter);
ResultScanner scanner = table.getScanner(scan);
final Iterator<Result> it = scanner.iterator();
Map<String, List<String>> fc = new HashMap<>();
fc.put("basic", Arrays.asList("name","age"));
fc.put("score", Arrays.asList("hadoop","java"));
while (it.hasNext()){
show(it.next(),fc);
}
table.close();
}
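A row-range variant of the scan, as a hedged sketch (setStartRow/setStopRow are the 1.2-era API; the stop row is exclusive):
@Test
public void scanRange() throws IOException {
    Table table = client.getTable(TableName.valueOf("jh:student"));
    Scan scan = new Scan();
    scan.setStartRow("1001".getBytes()); // inclusive
    scan.setStopRow("1003".getBytes());  // exclusive
    Map<String, List<String>> fc = new HashMap<>();
    fc.put("basic", Arrays.asList("name", "age"));
    for (Result rst : table.getScanner(scan)) { // ResultScanner is Iterable
        show(rst, fc);
    }
    table.close();
}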
}
III. Procedural Style
2. Config utility class: HBaseConfigUtil
- The configuration object can load hbase-site.xml and core-site.xml directly,
- or the same settings can be supplied through the corresponding properties.
package cn.xym.hbase.util;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
public class HBaseConfigUtil {
public static Configuration getHBaseConfiguration() {
Configuration configuration = HBaseConfiguration.create();
configuration.set("hbase.zookeeper.quorum", "chust01");
configuration.set("hbase.zookeeper.property.clientPort", "2181");
configuration.set("hbase.master","chust01:16000");
// The config can also be read from the files below:
// configuration.addResource(new Path("/etc/hbase/conf/hbase-site.xml"));
// configuration.addResource(new Path("/etc/hadoop/conf/core-site.xml"));
return configuration;
}
}
3. Create a table: CreateTable
package cn.xym.hbase.hbaseapitest;
import cn.xym.hbase.util.HBaseConfigUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
public class CreateTable {
public static void main(String[] args) {
Configuration config = HBaseConfigUtil.getHBaseConfiguration();
Connection connection = null;
Admin admin = null;
try {
connection = ConnectionFactory.createConnection(config);
admin = connection.getAdmin();
String tableName = "peoples";
if (!admin.tableExists(TableName.valueOf(tableName))) { // tableExists, not isTableAvailable: a disabled table exists but is not "available"
HTableDescriptor hbaseTable = new HTableDescriptor(TableName.valueOf(tableName));
hbaseTable.addFamily(new HColumnDescriptor("name"));
hbaseTable.addFamily(new HColumnDescriptor("contactinfo"));
hbaseTable.addFamily(new HColumnDescriptor("personalinfo"));
admin.createTable(hbaseTable);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (admin != null) {
admin.close();
}
if (connection != null && !connection.isClosed()) {
connection.close();
}
} catch (Exception e2) {
e2.printStackTrace();
}
}
}
}
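createTable also takes split keys to pre-create several regions instead of one. A hedged sketch that would replace the admin.createTable(hbaseTable) call above (boundary keys are illustrative; requires org.apache.hadoop.hbase.util.Bytes):
// Pre-split "peoples" into four regions at the given boundary row keys
byte[][] splitKeys = { Bytes.toBytes("3"), Bytes.toBytes("5"), Bytes.toBytes("7") };
admin.createTable(hbaseTable, splitKeys);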
4. Delete records: DeleteRecordsFromTable
package cn.xym.hbase.hbaseapitest;
import cn.xym.hbase.util.HBaseConfigUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import java.util.ArrayList;
import java.util.List;
public class DeleteRecordsFromTable {
public static void main(String[] args) {
Configuration config = HBaseConfigUtil.getHBaseConfiguration();
Connection connection = null;
Table table = null;
try {
connection = ConnectionFactory.createConnection(config);
table = connection.getTable(TableName.valueOf("peoples"));
List<Delete> deleteList = new ArrayList<Delete>();
for (int rowKey = 1; rowKey <= 10; rowKey++) {
deleteList.add(new Delete(Bytes.toBytes(rowKey + ""))); // rowKey + "" converts the int to the String row key used at insert time
}
// delete accepts a single Delete as well as a List<Delete> for batch deletion
table.delete(deleteList);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (table != null) {
table.close();
}
if (connection != null && !connection.isClosed()) {
connection.close();
}
} catch (Exception e2) {
e2.printStackTrace();
}
}
}
}
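A Delete can also target a single column or a whole family instead of the entire row; a hedged sketch against the same table:
// Narrower deletes on row "1"
Delete delete = new Delete(Bytes.toBytes("1"));
delete.addColumn(Bytes.toBytes("contactinfo"), Bytes.toBytes("email")); // newest version of one cell
delete.addFamily(Bytes.toBytes("personalinfo"));                        // all cells in the family
table.delete(delete);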
5. Drop a table: DeleteTable
package cn.xym.hbase.hbaseapitest;
import cn.xym.hbase.util.HBaseConfigUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
public class DeleteTable {
public static void main(String[] args) {
Configuration config = HBaseConfigUtil.getHBaseConfiguration();
Connection connection = null;
Admin admin = null;
try {
connection = ConnectionFactory.createConnection(config);
admin = connection.getAdmin();
TableName tableName = TableName.valueOf("peoples");
if (admin.tableExists(tableName)) { // tableExists, not isTableAvailable: a disabled table still exists and needs deleting
if (!admin.isTableDisabled(tableName)) {
admin.disableTable(tableName);
}
admin.deleteTable(tableName);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (admin != null) {
admin.close();
}
if (connection != null && !connection.isClosed()) {
connection.close();
}
} catch (Exception e2) {
e2.printStackTrace();
}
}
}
}
6. Query with filters: FilterTable
package cn.xym.hbase.hbaseapitest;
import cn.xym.hbase.util.HBaseConfigUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;
public class FilterTable {
public static void main(String[] args) {
Configuration config = HBaseConfigUtil.getHBaseConfiguration();
Connection connection = null;
Table table = null;
ResultScanner resultScanner = null;
try {
connection = ConnectionFactory.createConnection(config);
table = connection.getTable(TableName.valueOf("peoples"));
SingleColumnValueFilter filter1 = new SingleColumnValueFilter(Bytes.toBytes("personalinfo"), Bytes.toBytes("gender"), CompareOp.EQUAL, Bytes.toBytes("F"));
SingleColumnValueFilter filter2 = new SingleColumnValueFilter(Bytes.toBytes("personalinfo"), Bytes.toBytes("age"), CompareOp.GREATER_OR_EQUAL, Bytes.toBytes("25")); // age is stored as a string, so this compare is lexicographic ("100" < "25"); it works here only because all ages have two digits
FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
filterList.addFilter(filter1);
filterList.addFilter(filter2);
Scan scan = new Scan();
scan.setFilter(filterList);
scan.addColumn(Bytes.toBytes("name"), Bytes.toBytes("first"));
scan.addColumn(Bytes.toBytes("name"), Bytes.toBytes("last"));
scan.addColumn(Bytes.toBytes("contactinfo"), Bytes.toBytes("email"));
scan.addColumn(Bytes.toBytes("personalinfo"), Bytes.toBytes("gender"));
scan.addColumn(Bytes.toBytes("personalinfo"), Bytes.toBytes("age"));
resultScanner = table.getScanner(scan);
for (Result result = resultScanner.next(); result != null; result = resultScanner.next()) {
byte[] firstNameValue = result.getValue(Bytes.toBytes("name"), Bytes.toBytes("first"));
byte[] lastNameValue = result.getValue(Bytes.toBytes("name"), Bytes.toBytes("last"));
byte[] emailValue = result.getValue(Bytes.toBytes("contactinfo"), Bytes.toBytes("email"));
byte[] genderValue = result.getValue(Bytes.toBytes("personalinfo"), Bytes.toBytes("gender"));
byte[] ageValue = result.getValue(Bytes.toBytes("personalinfo"), Bytes.toBytes("age"));
String firstName = Bytes.toString(firstNameValue);
String lastName = Bytes.toString(lastNameValue);
String email = Bytes.toString(emailValue);
String gender = Bytes.toString(genderValue);
String age = Bytes.toString(ageValue);
System.out.println("First Name : " + firstName + " --- Last Name : " + lastName + " --- Email : " + email + " --- Gender : " + gender + " --- Age : " + age);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (resultScanner != null) {
resultScanner.close();
}
if (table != null) {
table.close();
}
if (connection != null && !connection.isClosed()) {
connection.close();
}
} catch (Exception e2) {
e2.printStackTrace();
}
}
}
}
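One caveat: by default SingleColumnValueFilter lets a row pass when the tested column is missing altogether. To exclude such rows, the 1.x API offers setFilterIfMissing, called on each filter right after construction; a hedged sketch:
// Exclude rows that lack the tested columns entirely
filter1.setFilterIfMissing(true); // otherwise rows with no personalinfo:gender cell pass the filter
filter2.setFilterIfMissing(true); // likewise for personalinfo:age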
7. Insert data: InsertIntoTable
package cn.xym.hbase.hbaseapitest;
import cn.xym.hbase.util.HBaseConfigUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
/**
* @author kgc
*/
public class InsertIntoTable {
public static void main(String[] args) {
InsertIntoTable object = new InsertIntoTable();
object.insertRecords();
}
public void insertRecords() {
Configuration config = HBaseConfigUtil.getHBaseConfiguration();
Connection connection = null;
Table table = null;
try {
connection = ConnectionFactory.createConnection(config);
table = connection.getTable(TableName.valueOf("peoples"));
// Sample data to write into the HBase table: {rowkey, first, last, email, gender, age}
String[][] people = {
{"1", "Marcel", "Haddad", "[email protected]", "M", "26"},
{"2", "Franklin", "Holtz", "[email protected]", "M", "24"},
{"3", "Dwayne", "McKee", "[email protected]", "M", "27"},
{"4", "Rae", "Schroeder", "[email protected]", "F", "31"},
{"5", "Rosalie", "burton", "[email protected]", "F", "25"},
{"6", "Gabriela", "Ingram", "[email protected]", "F", "24"}};
for (int i = 0; i < people.length; i++) {
// Inserting data means building a Put: each Put needs the RowKey, plus ColumnFamily, Column and value for every cell.
Put person = new Put(Bytes.toBytes(people[i][0]));
person.addColumn(Bytes.toBytes("name"), Bytes.toBytes("first"), Bytes.toBytes(people[i][1]));
person.addColumn(Bytes.toBytes("name"), Bytes.toBytes("last"), Bytes.toBytes(people[i][2]));
person.addColumn(Bytes.toBytes("contactinfo"), Bytes.toBytes("email"), Bytes.toBytes(people[i][3]));
person.addColumn(Bytes.toBytes("personalinfo"), Bytes.toBytes("gender"), Bytes.toBytes(people[i][4]));
person.addColumn(Bytes.toBytes("personalinfo"), Bytes.toBytes("age"), Bytes.toBytes(people[i][5]));
// perform the actual insert
table.put(person);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (table != null) {
table.close();
}
if (connection != null && !connection.isClosed()) {
connection.close();
}
} catch (Exception e2) {
e2.printStackTrace();
}
}
}
}
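Calling table.put once per row costs one RPC per row; for bulk writes the 1.x client provides BufferedMutator, which batches mutations client-side. A hedged sketch of what the loop in insertRecords could look like (requires import org.apache.hadoop.hbase.client.BufferedMutator):
// Batched writes inside insertRecords' try block, reusing the people array
try (BufferedMutator mutator = connection.getBufferedMutator(TableName.valueOf("peoples"))) {
    for (String[] p : people) {
        Put put = new Put(Bytes.toBytes(p[0]));
        put.addColumn(Bytes.toBytes("name"), Bytes.toBytes("first"), Bytes.toBytes(p[1]));
        // ... remaining columns as above ...
        mutator.mutate(put); // buffered, not sent immediately
    }
} // close() flushes anything still buffered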
8. List tables: ListTables
package cn.xym.hbase.hbaseapitest;
import cn.xym.hbase.util.HBaseConfigUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
public class ListTables {
public static void main(String[] args) {
ListTables object = new ListTables();
object.listTables();
}
public void listTables() {
Configuration config = HBaseConfigUtil.getHBaseConfiguration();
Connection connection = null;
Admin admin = null;
try {
connection = ConnectionFactory.createConnection(config);
admin = connection.getAdmin();
HTableDescriptor[] tableDescriptors = admin.listTables();
for (HTableDescriptor td : tableDescriptors) {
System.out.println(td.getNameAsString());
}
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (admin != null) {
admin.close();
}
if (connection != null && !connection.isClosed()) {
connection.close();
}
} catch (Exception e2) {
e2.printStackTrace();
}
}
}
}
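When only the names are needed, Admin also offers listTableNames(), which skips fetching full descriptors; a hedged sketch (requires import org.apache.hadoop.hbase.TableName):
// Names only, without descriptors
for (TableName name : admin.listTableNames()) {
    System.out.println(name.getNameAsString());
}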
9. Read a row by its rowkey: ReadTable
package cn.xym.hbase.hbaseapitest;
import cn.xym.hbase.util.HBaseConfigUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
public class ReadTable {
public static void main(String[] args) {
if (args.length < 1) { // the row key comes from the command line
System.out.println("Usage: ReadTable <rowKey>");
return;
}
ReadTable readTable = new ReadTable();
readTable.readTableData(args[0]);
}
public void readTableData(String rowKey) {
Configuration config = HBaseConfigUtil.getHBaseConfiguration();
Connection connection = null;
Table table = null;
try {
connection = ConnectionFactory.createConnection(config);
table = connection.getTable(TableName.valueOf("peoples"));
// Instantiating Get class
Get get = new Get(Bytes.toBytes(rowKey));
// Reading the data
Result result = table.get(get);
// Reading values from Result class object
byte[] firstNameValue = result.getValue(Bytes.toBytes("name"), Bytes.toBytes("first"));
byte[] lastNameValue = result.getValue(Bytes.toBytes("name"), Bytes.toBytes("last"));
byte[] emailValue = result.getValue(Bytes.toBytes("contactinfo"), Bytes.toBytes("email"));
byte[] genderValue = result.getValue(Bytes.toBytes("personalinfo"), Bytes.toBytes("gender"));
byte[] ageValue = result.getValue(Bytes.toBytes("personalinfo"), Bytes.toBytes("age"));
// Printing the values
String firstName = Bytes.toString(firstNameValue);
String lastName = Bytes.toString(lastNameValue);
String email = Bytes.toString(emailValue);
String gender = Bytes.toString(genderValue);
String age = Bytes.toString(ageValue);
System.out.println("First Name : " + firstName + " --- Last Name : " + lastName + " --- Email : " + email + " --- Gender : " + gender + " --- Age : " + age);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (table != null) {
table.close();
}
if (connection != null && !connection.isClosed()) {
connection.close();
}
} catch (Exception e2) {
e2.printStackTrace();
}
}
}
}
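A Get returns every column of the row by default; addColumn narrows the request to the cells actually read afterwards. A hedged sketch of the narrowed construction:
// Fetch only the five cells printed above instead of the whole row
Get get = new Get(Bytes.toBytes(rowKey));
get.addColumn(Bytes.toBytes("name"), Bytes.toBytes("first"));
get.addColumn(Bytes.toBytes("name"), Bytes.toBytes("last"));
get.addColumn(Bytes.toBytes("contactinfo"), Bytes.toBytes("email"));
get.addColumn(Bytes.toBytes("personalinfo"), Bytes.toBytes("gender"));
get.addColumn(Bytes.toBytes("personalinfo"), Bytes.toBytes("age"));
Result result = table.get(get);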
10. Scan the whole table: ScanTable
package cn.xym.hbase.hbaseapitest;
import cn.xym.hbase.util.HBaseConfigUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
public class ScanTable {
public static void main(String[] args) {
Configuration config = HBaseConfigUtil.getHBaseConfiguration();
Connection connection = null;
Table table = null;
ResultScanner resultScanner = null;
try {
connection = ConnectionFactory.createConnection(config);
table = connection.getTable(TableName.valueOf("peoples"));
Scan scan = new Scan();
scan.addColumn(Bytes.toBytes("name"), Bytes.toBytes("first"));
scan.addColumn(Bytes.toBytes("name"), Bytes.toBytes("last"));
scan.addColumn(Bytes.toBytes("contactinfo"), Bytes.toBytes("email"));
scan.addColumn(Bytes.toBytes("personalinfo"), Bytes.toBytes("gender"));
scan.addColumn(Bytes.toBytes("personalinfo"), Bytes.toBytes("age"));
resultScanner = table.getScanner(scan);
for (Result result = resultScanner.next(); result != null; result = resultScanner.next()) {
byte[] firstNameValue = result.getValue(Bytes.toBytes("name"), Bytes.toBytes("first"));
byte[] lastNameValue = result.getValue(Bytes.toBytes("name"), Bytes.toBytes("last"));
byte[] emailValue = result.getValue(Bytes.toBytes("contactinfo"), Bytes.toBytes("email"));
byte[] genderValue = result.getValue(Bytes.toBytes("personalinfo"), Bytes.toBytes("gender"));
byte[] ageValue = result.getValue(Bytes.toBytes("personalinfo"), Bytes.toBytes("age"));
String firstName = Bytes.toString(firstNameValue);
String lastName = Bytes.toString(lastNameValue);
String email = Bytes.toString(emailValue);
String gender = Bytes.toString(genderValue);
String age = Bytes.toString(ageValue);
System.out.println("First Name : " + firstName + " --- Last Name : " + lastName + " --- Email : " + email + " --- Gender : " + gender + " --- Age : " + age);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (resultScanner != null) {
resultScanner.close();
}
if (table != null) {
table.close();
}
if (connection != null && !connection.isClosed()) {
connection.close();
}
} catch (Exception e2) {
e2.printStackTrace();
}
}
}
}
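For large tables the number of rows fetched per RPC can be tuned before calling table.getScanner(scan); a hedged sketch (the default comes from hbase.client.scanner.caching):
Scan scan = new Scan();
scan.setCaching(100);       // rows returned per RPC, to cut round trips
scan.setCacheBlocks(false); // skip the block cache on full scans to avoid evicting hot data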