# 1. Download Sqoop 1.4.7 (built against Hadoop 2.6.0) from the Tsinghua Apache mirror:
#    https://mirrors.tuna.tsinghua.edu.cn/apache/sqoop/1.4.7/sqoop-1.4.7.bin__hadoop-2.6.0.tar.gz
# 2. Upload the sqoop tarball to the server.

# 3. Extract the tarball into /usr/local.
sudo tar -zxvf sqoop-1.4.7.bin__hadoop-2.6.0.tar.gz -C /usr/local/

# 4. Change into the /usr/local directory.
cd /usr/local

# 5. Rename the extracted sqoop-1.4.7.bin__hadoop-2.6.0 directory to sqoop.
sudo mv sqoop-1.4.7.bin__hadoop-2.6.0/ sqoop

# 6. Make the current user (hadoop) the owner of the sqoop directory tree.
sudo chown -R hadoop:hadoop sqoop/

# 7. Change into sqoop's conf directory.
cd /usr/local/sqoop/conf/

# 8. Rename sqoop-env-template.sh to sqoop-env.sh.
mv sqoop-env-template.sh sqoop-env.sh

# 9. Edit sqoop-env.sh and configure the Hadoop, HBase, Hive and ZooKeeper
#    installation directories (contents shown below).
# 9. Settings to put in sqoop-env.sh:

export HADOOP_COMMON_HOME=/usr/local/hadoop
# Set path to where hadoop-*-core.jar is available
export HADOOP_MAPRED_HOME=/usr/local/hadoop
# Set the path to where bin/hbase is available
# (fixed: the variable must be HBASE_HOME in upper case — the original
#  'Hbase_HOME' would simply be ignored by Sqoop's configure scripts)
export HBASE_HOME=/usr/local/hbase
# Set the path to where bin/hive is available
export HIVE_HOME=/usr/local/hive
# Set the path for where zookeper config dir is
export ZOOCFGDIR=/usr/local/zookeeper

# 10. Change into sqoop's lib directory and upload the MySQL JDBC driver
#     (mysql-connector-java-5.1.47.jar) into it.
cd /usr/local/sqoop/lib/

# 11. Copy Hive's common jar into sqoop's lib directory.
cp /usr/local/hive/lib/hive-common-2.3.7.jar /usr/local/sqoop/lib/

# 12. Edit the environment variables.
#   a. Open the shell profile:
vim ~/.bashrc
#   b. Add the following lines:
export SQOOP_HOME=/usr/local/sqoop
export PATH=$PATH:$SQOOP_HOME/bin
#   c. Reload the profile so the changes take effect:
source ~/.bashrc

# 13. Verify the installation (next command).
# 13. Verify that sqoop is installed correctly.
sqoop version

# ---- Preparation before using Sqoop ----

# 1. Start ZooKeeper (must be started on all three nodes).
zkServer.sh start

# 2. Start Hadoop (on master).
start-all.sh

# 3. Start HBase (on master).
start-hbase.sh

# 4. All nodes.
#    NOTE(review): the original text lists step 4 ("所有的节点") with no
#    command attached — confirm what action step 4 was meant to cover.

# 5. Start Hive (on master — original text has the typo "msater").
hiveserver2

# 6. Log in to MySQL (next command).
# Log in to MySQL as root.
# (fixed: the original "-p1234567" was the password 123456 fused with the
#  step label "7."; the password 123456 matches the --password used by the
#  sqoop import command later in this document)
mysql -uroot -p123456

# 7. Create the sqoop database.
create database if not exists sqoop;

# 8. Switch to the sqoop database.
use sqoop;

# 9. Create the student table.
CREATE TABLE IF NOT EXISTS `student`(
  `id` int PRIMARY KEY COMMENT '编号',
  `name` varchar(20) COMMENT '名字',
  `age` int COMMENT '年龄'
)COMMENT '学生表';

# 10. Insert sample rows.
INSERT INTO student VALUES(1, 'zhangsan', 20);
INSERT INTO student VALUES(2, 'lisi', 24);
INSERT INTO student VALUES(3, 'wangwu', 18);
INSERT INTO student VALUES(4, 'zhaoliui', 22);

# 11. Import the MySQL student table into HDFS (next command).
# Import the student table from MySQL into HDFS.
# (fixed: the original "-m 112." was "-m 1" — a single map task — fused with
#  the step label "12.")
sqoop import \
  --connect jdbc:mysql://localhost:3306/sqoop \
  --username root \
  --password 123456 \
  --table student \
  --target-dir /user/student \
  --delete-target-dir \
  -m 1

# 12. Check that the import succeeded by dumping the output file.
hdfs dfs -cat /user/student/part-m-00000



