1, hive安装目录: hive-env.sh
2, 查看hiveserver2脚本启动过程
3, 查看hive2运行命令
1, hive安装目录: hive-env.sh

[root@bigdata4 ~]# tail /export/hive/conf/hive-env.sh
export HADOOP_HEAPSIZE=2048
export HADOOP_CLIENT_OPTS="-Xmx2g -Xms2g -Djava.net.preferIPv4Stack=true $HADOOP_CLIENT_OPTS"
HADOOP_HOME=/export/hadoop
HADOOP_CONF_DIR=/export/common/hadoop/conf
export HIVE_CONF_DIR=/export/common/hive/conf
export HIVE_HOME=/export/hive
export HIVE_AUX_JARS_PATH=/export/server/atlas-hive-hook-v2/

2, 查看hiveserver2脚本启动过程
[root@bigdata4 ~]# vim /export/hive/bin/hive
# Pull in user overrides (HADOOP_HOME, HIVE_AUX_JARS_PATH, heap settings, ...)
# from hive-env.sh, when the configuration directory provides one.
if test -f "${HIVE_CONF_DIR}/hive-env.sh"; then
  . "${HIVE_CONF_DIR}/hive-env.sh"
fi
### Case 1: HIVE_AUX_JARS_PATH is a single directory (e.g. /a/b/c/):
### every *.jar inside it is appended to AUX_CLASSPATH (':'-separated)
### and to AUX_PARAM as a ','-separated list of file:// URIs.
if [ -d "${HIVE_AUX_JARS_PATH}" ]; then
  # Resolve to an absolute path so the generated URIs are valid.
  hive_aux_jars_abspath=$(cd "${HIVE_AUX_JARS_PATH}" && pwd)
  echo "$hive_aux_jars_abspath"
  for f in "$hive_aux_jars_abspath"/*.jar; do
    AUX_CLASSPATH=${AUX_CLASSPATH}:$f
    if [ "${AUX_PARAM}" == "" ]; then
      AUX_PARAM=file://$f
    else
      AUX_PARAM=${AUX_PARAM},file://$f
    fi
  done
### Case 2: HIVE_AUX_JARS_PATH is a comma-separated list of jar files
### (e.g. /tmp/a.jar,/tmp/b.jar): turn ',' into ':' for the classpath,
### and prefix every entry with file:// for AUX_PARAM.
elif [ "${HIVE_AUX_JARS_PATH}" != "" ]; then
  HIVE_AUX_JARS_PATH=$(echo "${HIVE_AUX_JARS_PATH}" | sed 's/,/:/g')
  AUX_CLASSPATH=${AUX_CLASSPATH}:${HIVE_AUX_JARS_PATH}
  # Use '|' as the sed delimiter: the replacement text contains 'file://',
  # and the original form `s/:/,file:///g` is a sed syntax error because the
  # unescaped slashes collide with the '/' delimiter.
  AUX_PARAM="file://$(echo "${HIVE_AUX_JARS_PATH}" | sed 's|:|,file://|g')"
fi
### Case 3: additionally pick up any jars dropped into ${HIVE_HOME}/auxlib/.
# Quote ${HIVE_HOME} so a path containing whitespace does not word-split;
# the glob itself must stay unquoted to expand.
for f in "${HIVE_HOME}"/auxlib/*.jar; do
  # When the glob matches nothing it stays literal; skip that placeholder.
  if [[ ! -f $f ]]; then
    continue
  fi
  AUX_CLASSPATH=${AUX_CLASSPATH}:$f
  if [ "${AUX_PARAM}" == "" ]; then
    AUX_PARAM=file://$f
  else
    AUX_PARAM=${AUX_PARAM},file://$f
  fi
done
### Case 4: final result, e.g. AUX_PARAM=file:///tmp/a.jar,file:///tmp/b.jar
### Case 5: fold the collected jar URIs into the service start-up arguments
### (e.g. for: hive --service hiveserver2 &).
if [ -n "${AUX_PARAM}" ]; then
  AUX_JARS_CMD_LINE="-libjars ${AUX_PARAM}"
  # beeline is a thin client and does not take the server-side hiveconf flag.
  if [[ "$SERVICE" != beeline ]]; then
    HIVE_OPTS="$HIVE_OPTS --hiveconf hive.aux.jars.path=${AUX_PARAM}"
  fi
fi
3, 查看hive2运行命令



