具体操作步骤如下:
--1.创建测试表
-- Plain-text Hive table whose backing files use comma-separated fields.
-- NOTE(review): the name is unqualified here but referenced below as
-- hduser0401.test0825 — presumably run with hduser0401 as the current database.
CREATE TABLE test0825 (id INT, name STRING)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
STORED AS TEXTFILE;
--2.创建测试文件test0825.txt
1,test01
2,test02
3,test03
4,test04
5,test05
字段以逗号分隔
--3.往hive表中添加测试数据
hive -e "load data local inpath './test0825.txt' into table hduser0401.test0825";
--4.查看hive表对应的HDFS
[hduser0401@dev-l002782 ~]$ hive -S -e "show create table hduser0401.test0825";
16/08/25 17:11:23 INFO impl.MetricsConfig: loaded properties from hadoop-metrics2-hive.properties
16/08/25 17:11:23 INFO impl.MetricsSinkAdapter: Sink mysink0 started
16/08/25 17:11:23 INFO impl.MetricsSourceAdapter: MBean for source MetricsSystem,sub=Stats registered.
16/08/25 17:11:23 INFO impl.MetricsSystemImpl: Scheduled snapshot period at 1 second(s).
16/08/25 17:11:23 INFO impl.MetricsSystemImpl: hive metrics system started
createtab_stmt
CREATE TABLE `hduser0401.test0825`(
`id` int,
`name` string)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
STORED AS INPUTFORMAT
'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION
'hdfs://dev-l002781.app.paic.com.cn:9000/user/hive/warehouse/hduser0401.db/test0825'
TBLPROPERTIES (
'numFiles'='2',
'COLUMN_STATS_ACCURATE'='true',
'transient_lastDdlTime'='1472114461',
'totalSize'='18',
'numRows'='0',
'rawDataSize'='0')
可以看出文件存放目录为:/user/hive/warehouse/hduser0401.db/test0825
--5.使用sqoop导出数据到Oracle中
sqoop export -D mapred.job.queue.name=queue02 --connect jdbc:oracle:thin:@d0esdm.dbdev.paic.com.cn:1526:D0ESDM --username PA18ODSDATA --password pa182007 --export-dir '/user/hive/warehouse/hduser0401.db/test0825' --columns id,name --verbose --table test0825 --input-fields-terminated-by ',' --input-lines-terminated-by '\n'
--6.登陆Oracle数据库查看数据
Connected as pa18odsdata
SQL> select * from test0825;
ID NAME
---------- --------------------
5 test05
4 test04
3 test03
1 test01
2 test02
补充:如果Hive中的test0825表字段是以\001分隔,那么导出命令为:
sqoop export -D mapred.job.queue.name=queue02 --connect jdbc:oracle:thin:@d0esdm.dbdev.paic.com.cn:1526:D0ESDM --username PA18ODSDATA --password pa182007 --export-dir '/user/hive/warehouse/hduser0401.db/test0825' --columns id,name --verbose --table test0825 --input-fields-terminated-by '\001' --input-lines-terminated-by '\n'
--1.创建测试表
-- Plain-text Hive table whose backing files use comma-separated fields.
-- NOTE(review): the name is unqualified here but referenced below as
-- hduser0401.test0825 — presumably run with hduser0401 as the current database.
CREATE TABLE test0825 (id INT, name STRING)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
STORED AS TEXTFILE;
--2.创建测试文件test0825.txt
1,test01
2,test02
3,test03
4,test04
5,test05
字段以逗号分隔
--3.往hive表中添加测试数据
hive -e "load data local inpath './test0825.txt' into table hduser0401.test0825";
--4.查看hive表对应的HDFS
[hduser0401@dev-l002782 ~]$ hive -S -e "show create table hduser0401.test0825";
16/08/25 17:11:23 INFO impl.MetricsConfig: loaded properties from hadoop-metrics2-hive.properties
16/08/25 17:11:23 INFO impl.MetricsSinkAdapter: Sink mysink0 started
16/08/25 17:11:23 INFO impl.MetricsSourceAdapter: MBean for source MetricsSystem,sub=Stats registered.
16/08/25 17:11:23 INFO impl.MetricsSystemImpl: Scheduled snapshot period at 1 second(s).
16/08/25 17:11:23 INFO impl.MetricsSystemImpl: hive metrics system started
createtab_stmt
CREATE TABLE `hduser0401.test0825`(
`id` int,
`name` string)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
STORED AS INPUTFORMAT
'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION
'hdfs://dev-l002781.app.paic.com.cn:9000/user/hive/warehouse/hduser0401.db/test0825'
TBLPROPERTIES (
'numFiles'='2',
'COLUMN_STATS_ACCURATE'='true',
'transient_lastDdlTime'='1472114461',
'totalSize'='18',
'numRows'='0',
'rawDataSize'='0')
可以看出文件存放目录为:/user/hive/warehouse/hduser0401.db/test0825
--5.使用sqoop导出数据到Oracle中
sqoop export -D mapred.job.queue.name=queue02 --connect jdbc:oracle:thin:@d0esdm.dbdev.paic.com.cn:1526:D0ESDM --username PA18ODSDATA --password pa182007 --export-dir '/user/hive/warehouse/hduser0401.db/test0825' --columns id,name --verbose --table test0825 --input-fields-terminated-by ',' --input-lines-terminated-by '\n'
--6.登陆Oracle数据库查看数据
Connected as pa18odsdata
SQL> select * from test0825;
ID NAME
---------- --------------------
5 test05
4 test04
3 test03
1 test01
2 test02
补充:如果Hive中的test0825表字段是以\001分隔,那么导出命令为:
sqoop export -D mapred.job.queue.name=queue02 --connect jdbc:oracle:thin:@d0esdm.dbdev.paic.com.cn:1526:D0ESDM --username PA18ODSDATA --password pa182007 --export-dir '/user/hive/warehouse/hduser0401.db/test0825' --columns id,name --verbose --table test0825 --input-fields-terminated-by '\001' --input-lines-terminated-by '\n'