# Install Hadoop via Homebrew (lands under /usr/local/Cellar/hadoop/<version>).
brew install hadoop
# Install Hive via Homebrew (lands under /usr/local/Cellar/hive/<version>).
brew install hive
Hadoop3.3安装
2. 参数配置参考地址:Hive安装配置
(1)环境变量:
## java
export JAVA_HOME=/Library/Java/JavaVirtualMachines/jdk1.8.0_321.jdk/Contents/Home
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:$PATH
## go
export GOROOT=/usr/local/Cellar/go/1.18
export GOPATH=/Users/apple/workspace/GoProjects
export GOBIN=$GOPATH/bin
export GO111MODULE=on
export GOPROXY=https://goproxy.cn,https://goproxy.io,direct
export GOSUMDB=sum.golang.google.cn
## hadoop
export HADOOP_HOME=/usr/local/Cellar/hadoop/3.3.2/libexec
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
# Use $(...) instead of deprecated backticks, and guard the call so sourcing
# this file does not print an error before hadoop is resolvable on PATH.
if command -v hadoop >/dev/null 2>&1; then
  export HADOOP_CLASSPATH="$(hadoop classpath)"
fi
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$HADOOP_HOME/lib/native/"
## hive
export HIVE_HOME=/usr/local/Cellar/hive/3.1.3/libexec
export HIVE_CONF_DIR=$HIVE_HOME/conf
# Make the hadoop/hdfs/hive CLIs used later in these notes resolvable from
# any shell (the original never added them to PATH).
export PATH="$PATH:${HADOOP_HOME}/bin:${HADOOP_HOME}/sbin:${HIVE_HOME}/bin"
(2)hadoop-env.sh
#############################################################
# hadoop-env.sh — JVM and config-dir settings read by the Hadoop daemons.
# NOTE(review): this JAVA_HOME (Homebrew openjdk@8) differs from the Oracle
# JDK path exported in the shell profile above — confirm which is intended.
export JAVA_HOME=/usr/local/Cellar/openjdk@8/1.8.0+322
export HADOOP_CONF_DIR=/usr/local/Cellar/hadoop/3.3.2/libexec/etc/hadoop
(3)core-site.xml
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://192.168.31.120:9000</value>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/usr/local/Cellar/hadoop/3.3.2/data/tmp</value>
  </property>
</configuration>
(4)hdfs-site.xml
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
  <property>
    <name>dfs.permissions.enabled</name>
    <value>false</value>
  </property>
</configuration>
3. Hive配置
(1)hive-env.sh
# hadoop — home dir Hive uses to find the Hadoop installation
HADOOP_HOME=/usr/local/Cellar/hadoop/3.3.2/libexec
# hive conf — directory containing hive-site.xml
export HIVE_CONF_DIR=/usr/local/Cellar/hive/3.1.3/libexec/conf
# hive lib — extra jars (e.g. the MySQL JDBC driver) appended to Hive's classpath
export HIVE_AUX_JARS_PATH=/usr/local/Cellar/hive/3.1.3/libexec/lib
(2)hive-site.xml
<configuration>
  <!-- 注意:XML 中的 & 必须写成 &amp; -->
  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://localhost:3306/metastore?createDatabaseIfNotExist=true&amp;useUnicode=true&amp;characterEncoding=UTF-8&amp;useSSL=false&amp;serverTimezone=GMT</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.cj.jdbc.Driver</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>root</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>123456aA</value>
  </property>
  <property>
    <name>system:java.io.tmpdir</name>
    <value>/usr/local/Cellar/hive/3.1.3/data/tmp</value>
  </property>
  <property>
    <name>hive.cli.print.header</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.cli.print.current.db</name>
    <value>true</value>
  </property>
</configuration>
(3)将MySQL的JDBC驱动拷贝到Hive的lib目录下:
# Download Connector/J, unpack it, and copy the driver jar into Hive's lib
# directory (the download alone does not complete the step this section names).
wget https://cdn.mysql.com//Downloads/Connector-J/mysql-connector-java-8.0.27.tar.gz
tar -xzf mysql-connector-java-8.0.27.tar.gz
cp mysql-connector-java-8.0.27/mysql-connector-java-8.0.27.jar "$HIVE_HOME/lib/"
(4)创建文件夹
## 创建文件夹 — create the HDFS directories Hive needs
# (use `hdfs dfs` consistently; the original mixed it with `hadoop fs`)
hdfs dfs -mkdir -p /user/hive/warehouse
hdfs dfs -mkdir -p /user/hive/tmp
hdfs dfs -mkdir -p /user/hive/log
## 修改文件夹权限 — open up permissions on them
hdfs dfs -chmod -R 777 /user/hive/warehouse
hdfs dfs -chmod -R 777 /user/hive/tmp
hdfs dfs -chmod -R 777 /user/hive/log
(5)关闭安全模式
# Leave HDFS safe mode so the filesystem accepts writes (needed before mkdir/chmod).
hdfs dfsadmin -safemode leave
(6)初始化元数据
# Initialize the Hive metastore schema in the MySQL `metastore` database (run once).
schematool -dbType mysql -initSchema
(7).服务端后台开启metastore
# Start the metastore service in the background (nohup output goes to ./nohup.out).
nohup hive --service metastore &
(8)启动hiveserver2节点
# Start HiveServer2 in the background. Use the `hive --service` form, matching
# the metastore step above — the original `bin/hiveserver2` only worked when
# the current directory happened to be $HIVE_HOME.
nohup hive --service hiveserver2 &
(9)连接
beeline> !connect jdbc:hive2://hadoop1:10000    # 默认端口号是 10000
Connecting to jdbc:hive2://hadoop1:10000
# 输入账号密码
Enter username for jdbc:hive2://hadoop1:10000: root
Enter password for jdbc:hive2://hadoop1:10000: ******
(10)Web UI 访问地址
hdfs: http://localhost:9870/
hive: http://localhost:10002/



