- 去清华源下载最新Hadoop,适配Ubuntu 20.04 LTS amd64的版本
# Download the latest Hadoop release from the Tsinghua mirror (Ubuntu 20.04 LTS amd64).
# -s silences curl's progress noise; -E enables extended regex so '+' works and
# the escaped dots match literal dots — the original pattern's bare '.' matched
# any character and single [[:digit:]] would miss multi-digit versions like 3.10.1.
wget "https://mirrors.tuna.tsinghua.edu.cn/apache/hadoop/common/current/$(curl -s https://mirrors.tuna.tsinghua.edu.cn/apache/hadoop/common/current/ | grep -oE 'hadoop-[0-9]+\.[0-9]+\.[0-9]+' | head -n 1).tar.gz"
- 解压Hadoop
# Extract the Hadoop tarball into /opt.
# The original line used Unicode en-dashes (–zxvf, –C), which tar rejects as
# arguments; they must be plain ASCII hyphens. The glob is also narrowed to
# the tarball so it cannot match a previously extracted directory.
sudo tar -zxvf hadoop-*.tar.gz -C /opt
- 重命名
# Rename the versioned directory to a stable, version-independent path.
sudo mv /opt/hadoop-* /opt/hadoop
- 修改文件夹权限,避免后续sudo
# Give the current user ownership so later steps don't need sudo.
# "$USER" is quoted so the expansion stays a single argument.
sudo chown -R "$USER" /opt/hadoop
- 添加环境变量
# Append Hadoop's bin/sbin directories to PATH, then reload the shell config.
# Single quotes keep $PATH literal in ~/.bashrc so it expands at every login;
# the original double quotes baked the current shell's PATH into the file.
# The original also jammed `source` onto the echo line with no separator,
# which would have appended the words "source ~/.bashrc" into .bashrc itself.
echo 'export PATH=$PATH:/opt/hadoop/bin:/opt/hadoop/sbin' >> ~/.bashrc
source ~/.bashrc
- 安装java
# Install OpenJDK 8 (headless runtime + compiler), required by Hadoop and HBase.
sudo apt install openjdk-8-jre-headless openjdk-8-jdk-headless
- 添加hadoop所需的环境变量
# Set JAVA_HOME for Hadoop and YARN — the original jammed both echo commands
# onto one line with no separator, so the second never ran as a command.
# JAVA_HOME=/usr works because apt places java at /usr/bin/java;
# /usr/lib/jvm/java-8-openjdk-amd64 is the more conventional value.
echo "export JAVA_HOME=/usr" >> /opt/hadoop/etc/hadoop/hadoop-env.sh
echo "export JAVA_HOME=/usr" >> /opt/hadoop/etc/hadoop/yarn-env.sh
- 配置ssh
# Generate an SSH key pair (press Enter through every prompt).
ssh-keygen
# Install the public key into authorized_keys so passwordless `ssh localhost`
# works (Hadoop's start scripts ssh into localhost to launch daemons).
# The original line jammed all commands and comments together and misspelled
# "authorized_keys"; ~ replaces the hard-coded /home/$USER.
ssh-copy-id -i ~/.ssh/id_rsa.pub "$USER@localhost"
- 修改hadoop相关xml配置文件
- 向/opt/hadoop/etc/hadoop/core-site.xml添加
<property>
  <name>hadoop.tmp.dir</name>
  <value>file:/opt/hadoop/tmp</value>
  <description>location to store temporary files</description>
</property>
<property>
  <name>fs.defaultFS</name>
  <value>hdfs://localhost:9000</value>
</property>
- 向/opt/hadoop/etc/hadoop/hdfs-site.xml添加
<property>
  <name>dfs.replication</name>
  <value>1</value>
</property>
<property>
  <name>dfs.namenode.name.dir</name>
  <value>file:/opt/hadoop/tmp/dfs/name</value>
</property>
<property>
  <name>dfs.datanode.data.dir</name>
  <value>file:/opt/hadoop/tmp/dfs/data</value>
</property>
- 向/opt/hadoop/etc/hadoop/mapred-site.xml添加
<property>
  <name>mapreduce.framework.name</name>
  <value>yarn</value>
</property>
- 向/opt/hadoop/etc/hadoop/yarn-site.xml添加
<property>
  <name>yarn.nodemanager.aux-services</name>
  <value>mapreduce_shuffle</value>
</property>
- 初始化namenode
# Initialize the NameNode's on-disk metadata (run once, before the first start).
hdfs namenode -format
- 开启Hadoop
# Start all Hadoop daemons (HDFS + YARN). Warnings can be ignored; startup
# proceeds automatically after about 10 seconds.
start-all.sh
# Equivalent alternative, as two separate commands:
# start-dfs.sh
# start-yarn.sh
- 去清华源下载最新Hbase,适配Ubuntu 20.04 LTS amd64的稳定版本
# Download the latest stable HBase from the Tsinghua mirror.
# Same fixes as the Hadoop download: silent curl, extended regex with escaped
# dots and multi-digit version components, quoted URL.
wget "https://mirrors.tuna.tsinghua.edu.cn/apache/hbase/stable/$(curl -s https://mirrors.tuna.tsinghua.edu.cn/apache/hbase/stable/ | grep -oE 'hbase-[0-9]+\.[0-9]+\.[0-9]+' | head -n 1)-bin.tar.gz"
- 解压Hbase
# Extract the HBase tarball into /opt.
sudo tar -zxvf hbase-* -C /opt/
- 重命名
# Rename the versioned directory to a stable, version-independent path.
sudo mv /opt/hbase-* /opt/hbase
- 修改文件夹权限,避免后续sudo
# Give the current user ownership so later steps don't need sudo.
# "$USER" is quoted so the expansion stays a single argument.
sudo chown -R "$USER" /opt/hbase
- 添加环境变量
# Append HBase's bin directory to PATH, then reload the shell config.
# Single quotes keep $PATH literal in ~/.bashrc (expanded at login, not now),
# and `source` runs as its own command — the original jammed it onto the echo.
echo 'export PATH=$PATH:/opt/hbase/bin' >> ~/.bashrc
source ~/.bashrc
- 添加Hbase所需的环境变量
# Set HBase's required environment variables in hbase-env.sh.
# The second variable must be spelled HBASE_DISABLE_HADOOP_CLASSPATH_LOOKUP
# (all caps) — the original "Hbase_..." spelling is ignored by HBase. Setting
# it to true stops HBase from pulling Hadoop's jars onto its classpath, which
# avoids jar conflicts between the two installs. Each export goes on its own
# line instead of being embedded in a single echo string.
echo "export JAVA_HOME=/usr" >> /opt/hbase/conf/hbase-env.sh
echo "export HBASE_DISABLE_HADOOP_CLASSPATH_LOOKUP=true" >> /opt/hbase/conf/hbase-env.sh
第二条export来自一篇博客,用于解决HBase和Hadoop之间的jar包冲突
- 修改HBase相关xml配置文件
- 向/opt/hbase/conf/hbase-site.xml添加
<property>
  <name>hbase.rootdir</name>
  <value>hdfs://localhost:9000/hbase</value>
</property>
<property>
  <name>hbase.cluster.distributed</name>
  <value>true</value>
</property>
- 关闭Hadoop安全模式
# Take HDFS out of safe mode so HBase can write to it.
# `hadoop dfsadmin` is the deprecated front-end; `hdfs dfsadmin` is the
# current command for HDFS administration.
hdfs dfsadmin -safemode leave
此行为来自一篇博客,用于解决hbase shell启动异常
- 打开HBase
# Start the HBase daemons.
start-hbase.sh
- 开启localmaster
# Start backup HMaster instances on this machine; each number is an instance
# offset (presumably applied to the default ports — confirm against the HBase docs).
local-master-backup.sh start 2 3 5
- 开启localregionservers
# Start additional RegionServer instances on this machine; each number is an
# instance offset (presumably applied to the default ports — confirm against the HBase docs).
local-regionservers.sh start 2 3 4 5
- 打开hbase shell
# Open the interactive HBase shell.
hbase shell



