Docker must be installed before starting.
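If you want to sanity-check the installation first, something like the following works (assuming your user can talk to the Docker daemon; otherwise prefix the commands with sudo):

```bash
# Print client/server versions; fails if the daemon is not running
docker version
# Run a throwaway container to confirm images can be pulled and executed
docker run --rm hello-world
```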
```bash
sudo docker pull hub.c.163.com/public/centos   # pull the base image first
mkdir centos-ssh && cd centos-ssh && vi Dockerfile
```
```dockerfile
# Base image; pull it before building
FROM centos
MAINTAINER bernard
# Install openssh-server and sudo, and set sshd's UsePAM option to no
RUN yum install -y openssh-server sudo
RUN sed -i 's/UsePAM yes/UsePAM no/g' /etc/ssh/sshd_config
# Install openssh-clients
RUN yum install -y openssh-clients
# Add a test user root with password root, and add it to sudoers
RUN echo "root:root" | chpasswd
RUN echo "root ALL=(ALL) ALL" >> /etc/sudoers
# Generate host keys non-interactively (-N '' avoids a passphrase prompt during build)
RUN ssh-keygen -t dsa -N '' -f /etc/ssh/ssh_host_dsa_key
RUN ssh-keygen -t rsa -N '' -f /etc/ssh/ssh_host_rsa_key
# Start sshd and expose port 22
RUN mkdir /var/run/sshd
EXPOSE 22
CMD ["/usr/sbin/sshd", "-D"]
```
```bash
docker build -t 'bernard/centos-ssh' .   # build the image
```
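To verify the image, a throwaway container can be started and logged into over SSH; the name ssh-test and host port 2222 are arbitrary choices for this check:

```bash
docker run -d --name ssh-test -p 2222:22 bernard/centos-ssh
ssh root@localhost -p 2222   # password: root (set in the Dockerfile above)
docker rm -f ssh-test        # clean up afterwards
```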
```bash
mkdir centos-ssh-root-jdk && cd centos-ssh-root-jdk
# copy a JDK tarball (jdk-8u301-linux-x64.tar.gz) into this directory, then edit the Dockerfile
vi Dockerfile
```

```dockerfile
# Build on top of the ssh image from the previous step
FROM bernard/centos-ssh
# Copy and unpack the JDK (ADD extracts tar.gz archives automatically)
ADD jdk-8u301-linux-x64.tar.gz /usr/local/
# jdk-8u301 unpacks to jdk1.8.0_301
RUN mv /usr/local/jdk1.8.0_301 /usr/local/jdk1.8
ENV JAVA_HOME /usr/local/jdk1.8
ENV PATH $JAVA_HOME/bin:$PATH
```

Save and quit (:wq), then build:

```bash
docker build -t 'bernard/centos-jdk' .
```
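A quick check that the JDK landed where the ENV lines expect:

```bash
# java resolves via the PATH set in the Dockerfile
docker run --rm bernard/centos-jdk java -version
```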
```bash
mkdir centos-ssh-root-jdk-hadoop && cd centos-ssh-root-jdk-hadoop
# copy hadoop-3.2.2.tar.gz into this directory, then edit the Dockerfile
vi Dockerfile
```

```dockerfile
FROM bernard/centos-jdk
ADD hadoop-3.2.2.tar.gz /usr/local
RUN mv /usr/local/hadoop-3.2.2 /usr/local/hadoop
ENV HADOOP_HOME /usr/local/hadoop
ENV PATH $HADOOP_HOME/bin:$PATH
```

Save and quit (:wq), then build:

```bash
docker build -t 'bernard/hadoop' .
```
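As before, the image can be spot-checked before moving on:

```bash
# Prints the Hadoop version if the tarball unpacked and PATH is correct
docker run --rm bernard/hadoop hadoop version
```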
Open three terminal windows, one for each container.
```bash
# -P publishes the exposed SSH port to a random host port; add -p mappings (e.g. for web UIs) as needed
docker run --name hadoop0 --hostname hadoop0 -d -P bernard/hadoop
docker run --name hadoop1 --hostname hadoop1 -d -P bernard/hadoop
docker run --name hadoop2 --hostname hadoop2 -d -P bernard/hadoop
```

Configure passwordless SSH login
- Configure passwordless SSH login from hadoop0 to hadoop1 and hadoop2
- Add all three containers' IP addresses to the hosts file on hadoop0, hadoop1, and hadoop2 (the lookup commands follow this list)
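To get a shell in each of the three windows, and to look up the container IPs needed for the hosts entries (this assumes the containers sit on Docker's default bridge network):

```bash
# One per terminal window
docker exec -it hadoop0 /bin/bash
docker exec -it hadoop1 /bin/bash
docker exec -it hadoop2 /bin/bash

# Print each container's bridge IP from the host
docker inspect -f '{{.NetworkSettings.IPAddress}}' hadoop0 hadoop1 hadoop2
```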
On hadoop0, generate a key and push it to the other nodes:

```bash
# On the source node (hadoop0)
ssh localhost
ssh-keygen -t rsa
scp ~/.ssh/id_rsa.pub root@hadoop1:~
# On the destination node (hadoop1; repeat for hadoop2)
cat ~/id_rsa.pub >> ~/.ssh/authorized_keys
```
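Equivalently, openssh-clients ships ssh-copy-id, which pushes the key in one step per node:

```bash
# Run on hadoop0 after ssh-keygen; enter the root password once per node
for h in hadoop1 hadoop2; do ssh-copy-id root@$h; done
```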
```bash
# /etc/hosts entries; adjust the IPs to match your own containers
172.17.0.3 hadoop0
172.17.0.4 hadoop1
172.17.0.5 hadoop2
```

Configure the Hadoop bashrc
```bash
vi ~/.bashrc
```

Append the following:

```bash
export JAVA_HOME=/usr/local/jdk1.8
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib:$CLASSPATH
export JAVA_PATH=${JAVA_HOME}/bin:${JRE_HOME}/bin
export PATH=$PATH:${JAVA_PATH}
export HADOOP_HOME=/usr/local/hadoop
export PATH=$PATH:$HADOOP_HOME/bin
export PATH=$PATH:$HADOOP_HOME/sbin
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root
```

Then reload it:

```bash
source ~/.bashrc
```
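A quick check that the new environment took effect (both commands should resolve via the PATH entries above):

```bash
java -version
hadoop version
```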
hadoop-env.sh
This file and all the files below live under HADOOP_HOME/etc/hadoop.
```bash
cd /usr/local/hadoop/etc/hadoop
vi hadoop-env.sh
```

Add this line to hadoop-env.sh:

```bash
export JAVA_HOME=/usr/local/jdk1.8
```

core-site.xml
```xml
<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://hadoop0:9000</value>
    </property>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>/usr/local/hadoop/tmp</value>
        <description>A base for other temporary directories.</description>
    </property>
</configuration>
```

hdfs-site.xml
```xml
<configuration>
    <property>
        <name>dfs.namenode.secondary.http-address</name>
        <value>hadoop0:50090</value>
    </property>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:/usr/local/hadoop/tmp/dfs/name</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:/usr/local/hadoop/tmp/dfs/data</value>
    </property>
</configuration>
```

mapred-site.xml
```xml
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
    <property>
        <name>mapreduce.jobhistory.address</name>
        <value>hadoop0:10020</value>
    </property>
    <property>
        <name>mapreduce.jobhistory.webapp.address</name>
        <value>hadoop0:19888</value>
    </property>
    <property>
        <name>yarn.app.mapreduce.am.env</name>
        <value>HADOOP_MAPRED_HOME=/usr/local/hadoop</value>
    </property>
    <property>
        <name>mapreduce.map.env</name>
        <value>HADOOP_MAPRED_HOME=/usr/local/hadoop</value>
    </property>
    <property>
        <name>mapreduce.reduce.env</name>
        <value>HADOOP_MAPRED_HOME=/usr/local/hadoop</value>
    </property>
</configuration>
```

yarn-site.xml
```xml
<configuration>
    <property>
        <name>yarn.resourcemanager.hostname</name>
        <value>hadoop0</value>
    </property>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
</configuration>
```

workers
```
hadoop0
hadoop1
hadoop2
```
Copy the configs to the other two nodes (run from the config directory on hadoop0):

```bash
scp * root@hadoop1:/usr/local/hadoop/etc/hadoop
scp * root@hadoop2:/usr/local/hadoop/etc/hadoop
```

Start the cluster
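Before the very first start, the NameNode must be formatted; this is a standard Hadoop step the notes skip (run on hadoop0; it wipes any existing HDFS metadata):

```bash
hdfs namenode -format
```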
```bash
start-dfs.sh
```
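start-dfs.sh only brings up HDFS; YARN is started separately, and jps is a convenient check that the daemons came up. As a smoke test, the bundled pi example can be run (the jar path matches the hadoop-3.2.2 tarball used above):

```bash
start-yarn.sh   # ResourceManager on hadoop0, NodeManagers on the workers
jps             # list the Java daemons on each node to confirm they are up
# Optional smoke test with the bundled examples jar
hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.2.2.jar pi 2 10
```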
References:

- https://zhuanlan.zhihu.com/p/59758201
- https://www.cnblogs.com/rmxd/p/12051866.html#_label1
Optionally, pipework can give the containers fixed IPs on a custom bridge:

```bash
git clone https://github.com/jpetazzo/pipework.git
# git clone checks out into pipework/ (pipework-master/ is the zip-download name)
sudo cp -rp pipework/pipework /usr/local/bin/
sudo brctl addbr br0
sudo ip link set dev br0 up
sudo ip addr add 192.168.2.1/24 dev br0
```
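A sketch of attaching the three containers to br0 with fixed addresses; the 192.168.2.x addresses are arbitrary picks inside the bridge's subnet:

```bash
# pipework <bridge> <container> <ip>/<prefix>
sudo pipework br0 hadoop0 192.168.2.10/24
sudo pipework br0 hadoop1 192.168.2.11/24
sudo pipework br0 hadoop2 192.168.2.12/24
```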



