Sqoop基本操作
Sqoop版本:Sqoop 1.4.7
- mysql中增加数据源:
-- Source database for the Sqoop import examples; utf8mb4 handles full Unicode.
CREATE DATABASE test DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
USE test;

-- Employee master table: the primary source table for the imports below.
CREATE TABLE emp
(
    id     INT         NOT NULL PRIMARY KEY,
    name   VARCHAR(32) NULL,
    deg    VARCHAR(32) NULL,  -- designation / job title
    salary INT         NULL,
    dept   VARCHAR(32) NULL
);

-- Seed data; a single multi-row INSERT yields the same table contents.
INSERT INTO emp (id, name, deg, salary, dept) VALUES
    (1201, 'gopal', 'manager', 50000, 'TP'),
    (1202, 'manisha', 'Proof reader', 50000, 'TP'),
    (1203, 'khalil', 'php dev', 30000, 'AC'),
    (1204, 'prasanth', 'php dev', 30000, 'AC'),
    (1205, 'kranthi', 'admin', 20000, 'TP');
-- Employee address table; id corresponds one-to-one with emp.id.
CREATE TABLE emp_add
(
    id     INT         NOT NULL PRIMARY KEY,
    hno    VARCHAR(32) NULL,  -- house number
    street VARCHAR(32) NULL,
    city   VARCHAR(32) NULL
);

INSERT INTO emp_add (id, hno, street, city) VALUES
    (1201, '288A', 'vgiri', 'jublee'),
    (1202, '108I', 'aoc', 'sec-bad'),
    (1203, '144Z', 'pgutta', 'hyd'),
    (1204, '78B', 'old city', 'sec-bad'),
    (1205, '720X', 'hitec', 'sec-bad');
-- Employee contact table; id corresponds one-to-one with emp.id.
CREATE TABLE emp_conn
(
    id    INT         NOT NULL PRIMARY KEY,
    phno  VARCHAR(32) NULL,  -- phone number
    email VARCHAR(32) NULL
);

INSERT INTO emp_conn (id, phno, email) VALUES
    (1201, '2356742', 'gopal@tp.com'),
    (1202, '1661663', 'manisha@tp.com'),
    (1203, '8887776', 'khalil@ac.com'),
    (1204, '9988774', 'prasanth@ac.com'),
    (1205, '1231231', 'kranthi@tp.com');
2、emp导入到hdfs
- 不指定HDFS的目标地址
(base) [root@zhang bin]# sqoop import \
> --connect jdbc:mysql://zhang/test \
> --username root \
> --password root \
> --table emp
能运行成功,默认保存到操作系统账户的家目录里,且只有5个map程序,没有reduce程序,默认分隔符为 “,”,5个分区
(base) [root@zhang bin]# hdfs dfs -ls /user/root/emp
Found 6 items
-rw-r--r-- 1 root supergroup 0 2024-10-11 15:31 /user/root/emp/_SUCCESS
-rw-r--r-- 1 root supergroup 28 2024-10-11 15:31 /user/root/emp/part-m-00000
-rw-r--r-- 1 root supergroup 35 2024-10-11 15:31 /user/root/emp/part-m-00001
-rw-r--r-- 1 root supergroup 29 2024-10-11 15:31 /user/root/emp/part-m-00002
-rw-r--r-- 1 root supergroup 31 2024-10-11 15:31 /user/root/emp/part-m-00003
-rw-r--r-- 1 root supergroup 28 2024-10-11 15:31 /user/root/emp/part-m-00004
指定HDFS目的地
]# sqoop import --connect jdbc:mysql://zhang/test --username root --password root --table emp --target-dir "/sqoop/emp" --delete-target-dir
输出结果:5个分区,指定目标地址
(base) [root@zhang bin]# hdfs dfs -ls /sqoop/emp/
Found 6 items
-rw-r--r-- 1 root supergroup 0 2024-10-11 15:52 /sqoop/emp/_SUCCESS
-rw-r--r-- 1 root supergroup 28 2024-10-11 15:52 /sqoop/emp/part-m-00000
-rw-r--r-- 1 root supergroup 35 2024-10-11 15:52 /sqoop/emp/part-m-00001
-rw-r--r-- 1 root supergroup 29 2024-10-11 15:52 /sqoop/emp/part-m-00002
-rw-r--r-- 1 root supergroup 31 2024-10-11 15:52 /sqoop/emp/part-m-00003
-rw-r--r-- 1 root supergroup 28 2024-10-11 15:52 /sqoop/emp/part-m-00004
指定HDFS目的地,1个分区
# sqoop import --connect jdbc:mysql://zhang/test --username root --password root --table emp --target-dir "/sqoop/emp" --delete-target-dir -m 1
输出结果,1个分区
(base) [root@zhang bin]# hdfs dfs -ls /sqoop/emp/
Found 2 items
-rw-r--r-- 1 root supergroup 0 2024-10-11 15:59 /sqoop/emp/_SUCCESS
-rw-r--r-- 1 root supergroup 151 2024-10-11 15:59 /sqoop/emp/part-m-00000
指定分隔符:\001
# sqoop import --connect jdbc:mysql://zhang/test --username root --password root --table emp --target-dir "/sqoop/emp_1" --delete-target-dir -m 1 --fields-terminated-by '\001'
输出结果(\001 是不可打印字符,所以下面各字段看起来像连在一起,实际已按 \001 分隔):
1201gopalmanager50000TP
1202manishaProof reader50000TP
1203khalilphp dev30000AC
1204prasanthphp dev30000AC
1205kranthiadmin20000TP
3、导入到HIVE表中
在hive中建表
-- Hive target table for the import. Because it is stored as ORC (not
-- TEXTFILE), the Sqoop import must go through HCatalog (--hcatalog-table);
-- a plain --hive-import only writes delimited text files.
CREATE TABLE emp_add
(
    id     INT,
    hno    STRING,  -- house number
    street STRING,
    city   STRING
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY "\t"
STORED AS ORC;  -- fix: the statement was missing its terminating semicolon
从mysql导入到hive表
由于HIVE表存储格式为ORC,非TEXTFILE,选择Hcatalog导入
sqoop import --connect jdbc:mysql://zhang/test --username root --password root --table emp_add -m 1 --hcatalog-table emp_add
HIVE中emp_add结果:
4、条件导入
通过where
sqoop import --connect jdbc:mysql://zhang/test --username root --password root --table emp --target-dir "/sqoop/emp" --delete-target-dir -m 1 --where "id>1205"
(base) [root@zhang ~]# hdfs dfs -cat /sqoop/emp/part-m-00000
1206,ere,rr,20300,T3
1207,ddd,ee,20200,T4
通过SQL
sqoop import --connect jdbc:mysql://zhang/test --username root --password root --target-dir "/sqoop/emp" --delete-target-dir -m 1 --query 'select deg from emp where 1=1 and $CONDITIONS'
(base) [root@zhang ~]# hdfs dfs -cat /sqoop/emp/part-m-00000
manager
Proof reader
php dev
php dev
admin
rr
ee
(base) [root@zhang ~]#
5、增量导入到hive(此处用 where 条件模拟增量;Sqoop 另提供 --incremental append/lastmodified 配合 --check-column、--last-value 做真正的增量导入)
sqoop import --connect jdbc:mysql://zhang/test --username root --password root --table emp_add -m 1 --hcatalog-table emp_add --where "id>1205"
结果:
或
sqoop import --connect jdbc:mysql://zhang/test --username root --password root --hcatalog-table emp_add -m 1 --query 'select * from emp_add where id>1205 and $CONDITIONS'
6、导出到MySQL
在mysql建空表(必须)
-- Export target in MySQL; the table must exist before `sqoop export` runs.
-- Declare id NOT NULL PRIMARY KEY to match the source emp_add schema
-- (see the emp_add DDL above): without a key, re-running the export
-- silently inserts duplicate rows.
CREATE TABLE emp_add_mysql
(
    id     INT         NOT NULL PRIMARY KEY,
    hno    VARCHAR(32) NULL,
    street VARCHAR(32) NULL,
    city   VARCHAR(32) NULL
);
导出到mysql
注意两边数据类型
sqoop export --connect jdbc:mysql://zhang:3306/test --username root --password root --table emp_add_mysql -m 1 --hcatalog-table emp_add