版权声明:本文为博主原创文章,未经博主允许不得转载。 https://blog.csdn.net/u012292754/article/details/86509987
1 Hive常见的几种交互操作
[hadoop@node1 ~]$ hive -help
usage: hive
-d,--define <key=value> Variable subsitution to apply to hive
commands. e.g. -d A=B or --define A=B
--database <databasename> Specify the database to use
-e <quoted-query-string> SQL from command line
-f <filename> SQL from files
-H,--help Print help information
--hiveconf <property=value> Use value for given property
--hivevar <key=value> Variable subsitution to apply to hive
commands. e.g. --hivevar A=B
-i <filename> Initialization SQL file
-S,--silent Silent mode in interactive shell
-v,--verbose Verbose mode (echo executed SQL to the
console)
1.1 hive -e
[hadoop@node1 ~]$ hive -e "select * from test_hive.student"
ls: cannot access /home/hadoop/apps/spark-2.2.2-bin-2.6.0-cdh5.7.0/lib/spark-assembly-*.jar: No such file or directory
2019-01-16 16:26:45,432 WARN [main] mapreduce.TableMapReduceUtil: The hbase-prefix-tree module jar containing PrefixTreeCodec is not present. Continuing without it.
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/hadoop/apps/hbase-1.2.0-cdh5.7.0/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/hadoop/apps/hadoop-2.6.0-cdh5.7.0/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
2019-01-16 16:26:45,537 WARN [main] util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Logging initialized using configuration in file:/home/hadoop/apps/hive-1.1.0-cdh5.7.0/conf/hive-log4j.properties
OK
student.id student.name
1001 MIke
1002 John
1003 Mary
Time taken: 0.885 seconds, Fetched: 3 row(s)
1.2 hive -f
[hadoop@node1 ~]$ hive -f hive-f.sql
ls: cannot access /home/hadoop/apps/spark-2.2.2-bin-2.6.0-cdh5.7.0/lib/spark-assembly-*.jar: No such file or directory
2019-01-16 16:33:04,037 WARN [main] mapreduce.TableMapReduceUtil: The hbase-prefix-tree module jar containing PrefixTreeCodec is not present. Continuing without it.
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/hadoop/apps/hbase-1.2.0-cdh5.7.0/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/hadoop/apps/hadoop-2.6.0-cdh5.7.0/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
2019-01-16 16:33:04,162 WARN [main] util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Logging initialized using configuration in file:/home/hadoop/apps/hive-1.1.0-cdh5.7.0/conf/hive-log4j.properties
OK
student.id student.name
1001 MIke
1002 John
1003 Mary
Time taken: 0.834 seconds, Fetched: 3 row(s)
[hadoop@node1 ~]$ hive -f hive-f.sql > hive-res.txt
1.3 hive -i
- `hive -i <filename>` 在进入交互环境前先执行初始化 SQL 文件,常与 UDF 的注册语句配合使用
2 Hive 交互命令行操作
quit / exit
set key=value
- 操作 HDFS
hive (default)> dfs -ls /;
Found 2 items
drwxrwx--- - hadoop supergroup 0 2019-01-16 14:37 /tmp
drwxr-xr-x - hadoop supergroup 0 2019-01-16 12:40 /user
! 命令:在 Hive 交互环境中直接执行 Linux 命令(以 `!` 开头,以 `;` 结尾)
hive (default)> !ls /home/hadoop;
apps
appsData
derby.log
elasticsearchData
generate_log.py
hdp2.6-cdh5.7-data
hive-f.sql
hive-res.txt
log_generator.sh
metastore_db
myspark-1.0.jar
mysql-connector-java-5.1.44-bin.jar
student.txt
tempdata
testhadoop-1.0.jar
words.txt
3 查看操作历史命令
- 历史命令保存在当前用户主目录下的 `.hivehistory` 文件中(即 `~/.hivehistory`)
[hadoop@node1 ~]$ ls -al
total 1192
drwx------. 14 hadoop hadoop 4096 Jan 16 16:34 .
drwxr-xr-x. 3 root root 20 Sep 14 11:55 ..
drwxrwxr-x. 14 hadoop hadoop 4096 Dec 26 19:30 apps
drwxrwxr-x. 6 hadoop hadoop 68 Jan 16 15:47 appsData
-rw-------. 1 hadoop hadoop 27474 Jan 16 15:12 .bash_history
-rw-r--r--. 1 hadoop hadoop 18 Apr 11 2018 .bash_logout
-rw-r--r--. 1 hadoop hadoop 193 Apr 11 2018 .bash_profile
-rw-r--r--. 1 hadoop hadoop 231 Apr 11 2018 .bashrc
-rw-rw-r--. 1 hadoop hadoop 696 Dec 28 18:38 derby.log
drwxrwxr-x. 4 hadoop hadoop 30 Oct 25 21:59 elasticsearchData
-rw-r--r--. 1 hadoop hadoop 2225 Dec 14 14:22 generate_log.py
drwxrwxrwx. 3 hadoop hadoop 26 Oct 30 16:53 hdp2.6-cdh5.7-data
-rw-r--r--. 1 hadoop hadoop 32 Jan 16 16:31 hive-f.sql
-rw-rw-r--. 1 hadoop hadoop 1148 Jan 16 15:38 .hivehistory
-rw-rw-r--. 1 hadoop hadoop 54 Jan 16 16:34 hive-res.txt
drwxrwxr-x. 4 hadoop hadoop 31 Dec 7 10:32 .ivy2
-rwxrw-r--. 1 hadoop hadoop 37 Dec 14 14:37 log_generator.sh
drwxrwxr-x. 3 hadoop hadoop 24 Oct 31 09:36 .m2
drwxrwxr-x. 5 hadoop hadoop 133 Dec 28 18:38 metastore_db
-rw-r--r--. 1 hadoop hadoop 80244 Jan 4 20:10 myspark-1.0.jar
-rw-r--r--. 1 hadoop hadoop 999635 Aug 29 2017 mysql-connector-java-5.1.44-bin.jar
-rw-------. 1 hadoop hadoop 259 Jan 16 15:19 .mysql_history
drwxrwxr-x. 2 hadoop hadoop 40 Sep 14 12:34 .oracle_jre_usage
drwxrw----. 3 hadoop hadoop 19 Oct 31 09:36 .pki
-rw-------. 1 hadoop hadoop 11712 Jan 4 09:18 .scala_history
-rw-rw-r--. 1 hadoop hadoop 8133 Oct 23 14:53 .spark_history
drwx------. 2 hadoop hadoop 80 Dec 27 08:44 .ssh
-rw-r--r--. 1 hadoop hadoop 31 Jan 16 14:47 student.txt
drwxrwxr-x. 2 hadoop hadoop 24 Dec 10 19:25 tempdata
-rw-r--r--. 1 hadoop hadoop 15826 Jan 14 19:09 testhadoop-1.0.jar
-rw-------. 1 hadoop hadoop 8663 Jan 16 12:20 .viminfo
-rw-r--r--. 1 root root 131 Dec 26 17:47 words.txt
drwxrwxr-x. 5 hadoop hadoop 46 Dec 2 18:26 .zinc
[hadoop@node1 ~]$ cat .hivehistory
CREATE TEMPORARY VIEW parquetTable
USING org.apache.spark.sql.parquet
OPTIONS (
path "/home/hadoop/apps/spark-2.1.3-bin-2.6.0-cdh5.7.0/examples/src/main/resources/users.parquet"
);
show tables;
select * from parquettable;
use default
;
show databases;
use default;
create table bf_log(ip string,user string,requesturl string);
show tables;
desc bf_log;
select count(*) from bf_log;
create table student(id int,name string) ROW FORMAT DELIMITED FIELDS
TERMINATED BY '\t';
show tables;
load data local inpath 'home/hadoop/student.txt' into table student;
load data local inpath '/home/hadoop/student.txt' into table student;
select * from student;
select id from student;
show databases;
create database test_hive;
show databases;
ues test_hive;
use test_hive;
create table student(id int,name string) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t';
show tables;
desc student;
desc extended student;
desc fromatted student;
desc formatted student;
load data local inpath '/home/hadoop/student.txt' into table test_hive.student;
show functions;
desc function upper;
desc function extended upper;
select id,upper(name) uname from test_hive.student;