1、脚本模板1
#!/bin/bash
# Import a fixed list of MySQL tables into the Hive "ods" database via Sqoop,
# appending progress/error messages to ${sqoop_logs}.

# Sqoop installation bin directory
export sqoop_path=/home/sqoop-1.4.7/bin
# Hive installation bin directory
export hive_path=/home/hive-3.1.2/bin
# Custom log file for this script's output
export sqoop_logs=/home/sqoop-1.4.7/my_sqoop_log/sqoop.log

# Abort early if either tool path is missing.
if [ ! -e "${sqoop_path}" ] || [ ! -e "${hive_path}" ]; then
    echo "---- path not right ---- !" >> "${sqoop_logs}"
    exit 1
else
    echo "import start $(date +%F%n%T) !" >> "${sqoop_logs}"
fi

# Tables to import (space separated)
array_default=(db_info user_tmp_d)

# MySQL connection settings
mysql_driver=jdbc:mysql://192.168.100.26:3306/test
username=root
password=zd3123
echo "${mysql_driver}"

# Import each MySQL table straight into Hive.
for tables in "${array_default[@]}"
do
    # Drop the Hive table first so --create-hive-table does not fail on re-runs.
    "${hive_path}/hive" -e "use ods;drop table if exists ${tables}"
    # NOTE: backslash continuations are required here — without them every
    # --option line runs as its own (failing) shell command.
    # '\t' / '\n' are escape sequences that Sqoop itself interprets; the
    # original passed the literal letters "t" and "n" as delimiters.
    # Per the Sqoop user guide, NULL representations need double escaping
    # ('\\N') so that Hive receives the literal \N.
    "${sqoop_path}/sqoop" import \
        --connect "${mysql_driver}" \
        --username "${username}" \
        --password "${password}" \
        --table "${tables}" \
        --fields-terminated-by '\t' \
        --lines-terminated-by '\n' \
        --hive-drop-import-delims \
        --hive-import \
        --hive-overwrite \
        --hive-database ods \
        --create-hive-table \
        --hive-table "${tables}" \
        --null-string '\\N' \
        --null-non-string '\\N' \
        --delete-target-dir \
        --num-mappers 3 \
        --driver com.mysql.jdbc.Driver
    # Fail fast on error; drop the 'exit 1' to continue with remaining tables.
    if [ $? -ne 0 ]; then
        echo "error----${tables} import error--exit---$(date +%F%n%T)!" >> "${sqoop_logs}"
        exit 1
    else
        echo "${tables} import Successfully $(date +%F%n%T)!" >> "${sqoop_logs}"
    fi
done
2、脚本模板2
#!/bin/bash
# Export MySQL table db_info for one day into a dated HDFS directory with
# Sqoop, then expose it through a partitioned Hive external table.
# Usage: $0 <days-ago> <end-date YYYYMMDD>
#   $1 - how many days back from today to compute the target date
#   $2 - stop date: if the computed date is later than this, do nothing

# Sqoop installation bin directory
export sqoop_path=/home/sqoop-1.4.7/bin
# Hive installation bin directory
export hive_path=/home/hive-3.1.2/bin
# Custom log file (kept for parity with template 1; this script logs to stdout)
export sqoop_logs=/home/sqoop-1.4.7/my_sqoop_log/sqoop.log

# Both date arguments are required.
if [ $# -lt 2 ]; then
    echo "Usage: $0 <days-ago> <end-date YYYYMMDD>"
    exit 1
fi

# Abort early if either tool path is missing.
if [ ! -e "${sqoop_path}" ] || [ ! -e "${hive_path}" ]; then
    echo "---- path not right ---- !"
    exit 1
else
    echo "import start $(date +%F%n%T) !"
    source /etc/profile
    host=192.168.100.26
    # Target date, $1 days before today, as YYYYMMDD (GNU date).
    day=$(date "+%Y%m%d" -d "-$1 day")
    if [ "${day}" -gt "$2" ]; then
        # Past the end date: nothing to do. The original used 'break' here,
        # which is invalid because the surrounding for loop was commented out.
        exit 0
    else
        # Import from MySQL into HDFS.
        # \$CONDITIONS must reach Sqoop literally (it substitutes the split
        # predicate); unescaped, the shell expands it to an empty string and
        # the query breaks.
        sql="select
        id
        ,db_type
        ,db_version
        ,description
        ,host
        ,port
        ,user_name
        ,password
        ,create_time
        ,update_time
        from db_info where \$CONDITIONS"
        # Backslash continuations are required — without them every --option
        # line runs as its own (failing) shell command. '\t' is the tab
        # escape Sqoop interprets; the original passed a literal letter t.
        "${sqoop_path}/sqoop" import \
            --connect "jdbc:mysql://${host}:3306/test" \
            --username root \
            --password zd3123 \
            --query "${sql}" \
            --fields-terminated-by '\t' \
            --delete-target-dir \
            --target-dir "hdfs://192.168.100.26:9000/test/db_info/${day}/" \
            --split-by id \
            --num-mappers 1 \
            --driver com.mysql.jdbc.Driver
        # Fail fast on sqoop error; drop the 'exit 1' to keep going instead.
        if [ $? -ne 0 ]; then
            echo "Sqoop import data:db_info/${day} failed ..."
            exit 1
        else
            echo "Sqoop import data:db_info/${day} success..."
            # Create the external table on first run, then register/load the
            # day's partition. 'fields terminated by' must be '\t' to match
            # the delimiter Sqoop wrote; the partition value is quoted since
            # dt is a string column.
            hive -e "
            use test_ods_db;
            CREATE EXTERNAL TABLE IF NOT EXISTS test_ods_db.ods_db_info (
            id bigint
            ,db_type string
            ,db_version string
            ,description string
            ,host string
            ,port string
            ,user_name string
            ,password string
            ,create_time string
            ,update_time string
            ) PARTITIONED BY (
            dt string
            )
            row format delimited fields terminated by '\t';
            ALTER TABLE test_ods_db.ods_db_info ADD IF NOT EXISTS partition(dt='${day}') location '${day}';
            load data inpath 'hdfs://192.168.100.26:9000/test/db_info/${day}/' into table test_ods_db.ods_db_info PARTITION (dt='${day}');
            "
            echo "Hive create table add partition: dt=${day} ok..."
        fi
    fi
fi