Creating an HBase external table in Hive

First, create the HBase table and load four sample rows from the HBase shell. The table has a single column family, stu, so every qualifier must live under it:
create 'hivehbaseStu', 'stu'
put 'hivehbaseStu', 'row1', 'stu:stuName', 'tom'
put 'hivehbaseStu', 'row1', 'stu:course', 'english'
put 'hivehbaseStu', 'row1', 'stu:val', '90'

put 'hivehbaseStu', 'row2', 'stu:stuName', 'jim'
put 'hivehbaseStu', 'row2', 'stu:course', 'chinese'
put 'hivehbaseStu', 'row2', 'stu:val', '60'

put 'hivehbaseStu', 'row3', 'stu:stuName', 'john'
put 'hivehbaseStu', 'row3', 'stu:course', 'english'
put 'hivehbaseStu', 'row3', 'stu:val', '80'

put 'hivehbaseStu', 'row4', 'stu:stuName', 'lily'
put 'hivehbaseStu', 'row4', 'stu:course', 'math'
put 'hivehbaseStu', 'row4', 'stu:val', '98'
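
The same rows can also be written programmatically. Below is a minimal sketch using the old HTable/Put API from the HBase 0.9x client generation this post targets; the StuLoader class name is illustrative, not from the original post:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical loader; writes one of the rows shown in the shell session above.
public class StuLoader
{
	public static void main(String[] args) throws IOException
	{
		Configuration conf = HBaseConfiguration.create(); // picks up hbase-site.xml from the classpath
		HTable table = new HTable(conf, "hivehbaseStu");
		Put put = new Put(Bytes.toBytes("row1"));
		// family 'stu', qualifiers stuName/course/val; values are stored as strings
		put.add(Bytes.toBytes("stu"), Bytes.toBytes("stuName"), Bytes.toBytes("tom"));
		put.add(Bytes.toBytes("stu"), Bytes.toBytes("course"), Bytes.toBytes("english"));
		put.add(Bytes.toBytes("stu"), Bytes.toBytes("val"), Bytes.toBytes("90"));
		table.put(put);
		table.close();
	}
}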

Then map the HBase table into Hive as an external table. The entries in hbase.columns.mapping line up one-to-one, in order, with the Hive columns: :key binds the HBase row key to the first column, and the remaining entries bind to qualifiers in the stu family (no spaces inside the mapping string):

CREATE EXTERNAL TABLE hbasehive_table (key STRING, stuName STRING, course STRING, val INT)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,stu:stuName,stu:course,stu:val")
TBLPROPERTIES ("hbase.table.name" = "hivehbaseStu");
hive> show tables;
OK
age
age_out
cite
cite_count
dummy
exter_table
hbase_table_1
hbase_table_2
hbase_table_3
hbasehive_table
myouter
pokes
pokes2
student
student1
test
testhivedrivertable
testouter
userinfoouter
userouter
wyp
Time taken: 0.071 seconds, Fetched: 21 row(s)
hive> select * from hbasehive_table;
OK
row1	tom	english	90
row2	jim	chinese	60
row3	john	english	80
row4	lily	math	98
Time taken: 0.32 seconds, Fetched: 4 row(s)
hive> select * from hbasehive_table where val > 80;
Total jobs = 1
Launching Job 1 out of 1
Number of reduce tasks is set to 0 since there's no reduce operator
Starting Job = job_1405586574373_0001, Tracking URL = http://hadoopMaster:8088/proxy/application_1405586574373_0001/
Kill Command = /usr/local/hadoop/bin/hadoop job  -kill job_1405586574373_0001
Hadoop job information for Stage-1: number of mappers: 1; number of reducers: 0
2014-07-17 17:15:41,397 Stage-1 map = 0%,  reduce = 0%
2014-07-17 17:15:55,784 Stage-1 map = 100%,  reduce = 0%, Cumulative CPU 5.14 sec
MapReduce Total cumulative CPU time: 5 seconds 140 msec
Ended Job = job_1405586574373_0001
MapReduce Jobs Launched: 
Job 0: Map: 1   Cumulative CPU: 5.14 sec   HDFS Read: 267 HDFS Write: 38 SUCCESS
Total MapReduce CPU Time Spent: 5 seconds 140 msec
OK
row1	tom	english	90
row4	lily	math	98
Time taken: 60.632 seconds, Fetched: 2 row(s)
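
For comparison, the same predicate can be evaluated directly against HBase with a client-side scan and a SingleColumnValueFilter, without launching a MapReduce job. A minimal sketch against the same 0.9x-era client API (the StuScan class name is illustrative; note that stu:val holds strings, so the byte-wise GREATER comparison only works here because every value has exactly two digits):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class StuScan
{
	public static void main(String[] args) throws IOException
	{
		Configuration conf = HBaseConfiguration.create();
		HTable table = new HTable(conf, "hivehbaseStu");
		// keep only rows whose stu:val is byte-wise greater than "80"
		SingleColumnValueFilter filter = new SingleColumnValueFilter(
				Bytes.toBytes("stu"), Bytes.toBytes("val"),
				CompareFilter.CompareOp.GREATER, Bytes.toBytes("80"));
		filter.setFilterIfMissing(true); // drop rows that have no stu:val cell
		Scan scan = new Scan();
		scan.setFilter(filter);
		ResultScanner scanner = table.getScanner(scan);
		for (Result r : scanner)
		{
			System.out.println(Bytes.toString(r.getRow()) + "\t"
					+ Bytes.toString(r.getValue(Bytes.toBytes("stu"),
							Bytes.toBytes("stuName"))));
		}
		scanner.close();
		table.close();
	}
}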
Next, start HiveServer1 so Hive can be queried over JDBC:

hadoop@hadoopMaster:~$ hive --service hiveserver &

By default the service listens on port 10000.
========================================================
package com.hn.cluster.hive.jdbc;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class HiveJdbcClient
{
	// HiveServer1 JDBC driver, matching "hive --service hiveserver"
	private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

	/**
	 * @param args
	 * @throws SQLException
	 */
	public static void main(String[] args) throws SQLException
	{
		try
		{
			Class.forName(driverName);
		} catch (ClassNotFoundException e)
		{
			e.printStackTrace();
			System.exit(1);
		}
		// port 10000 is the hiveserver default
		Connection con = DriverManager.getConnection(
				"jdbc:hive://hadoopMaster:10000/default");

		Statement stmt = con.createStatement();
		String tableName = "hivetbl";
		// stmt.executeQuery("drop table " + tableName);
		// stmt.executeQuery("create table " + tableName + " (key int, value string)");

		// show tables
		String sql = "show tables";
		System.out.println("Running: " + sql);
		ResultSet res = stmt.executeQuery(sql);
		while (res.next())
		{
			System.out.println(res.getString(1));
		}

		// describe table
		sql = "describe " + tableName;
		System.out.println("Running: " + sql);
		res = stmt.executeQuery(sql);
		while (res.next())
		{
			System.out.println(res.getString(1) + "\t" + res.getString(2));
		}

		// select the four columns of hivetbl
		sql = "select * from " + tableName;
		System.out.println("Running: " + sql);
		res = stmt.executeQuery(sql);
		while (res.next())
		{
			System.out.println(res.getString(1) + "\t" + res.getString(2)
					+ "\t" + res.getString(3) + "\t" + res.getString(4));
		}

		res.close();
		stmt.close();
		con.close();
	}
}
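
The HBase-backed table created earlier can be read through the same driver. A minimal sketch reusing the connection settings from the client above (the HBaseHiveQuery class name is illustrative):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class HBaseHiveQuery
{
	public static void main(String[] args) throws ClassNotFoundException, SQLException
	{
		Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
		Connection con = DriverManager.getConnection(
				"jdbc:hive://hadoopMaster:10000/default");
		Statement stmt = con.createStatement();
		// Hive reads through the HBaseStorageHandler mapping; no data is copied
		ResultSet res = stmt.executeQuery("select * from hbasehive_table");
		while (res.next())
		{
			System.out.println(res.getString(1) + "\t" + res.getString(2)
					+ "\t" + res.getString(3) + "\t" + res.getInt(4));
		}
		res.close();
		stmt.close();
		con.close();
	}
}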

 

Port configuration for the Hive Web Interface (hive.hwi.listen.port, in hive-site.xml):
<property>  
  <name>hive.hwi.listen.port</name>  
  <value>9999</value>  
  <description>This is the port the Hive Web Interface will listen on</description>  
</property>
Start the service: hive --service hiveserver &
Command-line mode: hive --service cli
Start the web interface: hive --service hwi
The data can also be inspected at the storage level by printing the underlying HFile:

[hadoop@hadoopMaster ~]$ hbase org.apache.hadoop.hbase.io.hfile.HFile -f /hbase/data/default/hivehbaseStu/b78bed2cf19f6584c942c82c9c5daa6d/stu/f9d111e2f685446eae44bde1562dfc4b -p
K: row1/stu:course/1420509822704/Put/vlen=7/mvcc=0 V: english
K: row1/stu:dt/1420509822910/Put/vlen=6/mvcc=0 V: 201412

Reposted from houshangxiao.iteye.com/blog/2093297