# ============ core-site.xml ============
fs.defaultFS = hdfs://hacluster
hadoop.tmp.dir = file:///opt/soft/hadoop260/tmp
io.file.buffer.size = 4096
ha.zookeeper.quorum = hd01:2181,hd02:2181,hd03:2181
hadoop.proxyuser.root.hosts = *
hadoop.proxyuser.root.groups = *

# ============ hdfs-site.xml ============
dfs.block.size = 134217728
dfs.replication = 3
dfs.name.dir = file:///opt/soft/hadoop260/dfs/namenode_data
dfs.data.dir = file:///opt/soft/hadoop260/dfs/datanode_data
dfs.webhdfs.enabled = true
dfs.datanode.max.transfer.threads = 4096
dfs.nameservices = hacluster
dfs.ha.namenodes.hacluster = nn1,nn2
dfs.namenode.rpc-address.hacluster.nn1 = hd01:9000
dfs.namenode.servicerpc-address.hacluster.nn1 = hd01:53310
dfs.namenode.http-address.hacluster.nn1 = hd01:50070
dfs.namenode.rpc-address.hacluster.nn2 = hd02:9000
dfs.namenode.servicerpc-address.hacluster.nn2 = hd02:53310
dfs.namenode.http-address.hacluster.nn2 = hd02:50070
dfs.namenode.shared.edits.dir = qjournal://hd01:8485;hd02:8485;hd03:8485/hacluster
dfs.journalnode.edits.dir = /opt/soft/hadoop260/dfs/journalnode_data
dfs.namenode.edits.dir = /opt/soft/hadoop260/dfs/edits
dfs.ha.automatic-failover.enabled = true
dfs.client.failover.proxy.provider.hacluster = org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider
dfs.ha.fencing.methods = sshfence
dfs.ha.fencing.ssh.private-key-files = /root/.ssh/id_rsa
dfs.permissions = false

# ============ mapred-site.xml ============
mapreduce.framework.name = yarn
mapreduce.jobhistory.address = hd01:10020
mapreduce.jobhistory.webapp.address = hd01:19888
mapreduce.job.ubertask.enable = true

# ============ /etc/ntp.conf (time sync) ============
server ntp1.aliyun.com
server ntp2.aliyun.com
server ntp3.aliyun.com

# ============ /etc/profile ============
# JAVA ENVIRONMENT
export JAVA_HOME=/opt/soft/jdk180
export PATH=$JAVA_HOME/bin:$PATH
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar

# ============ zoo.cfg (ZooKeeper ensemble) ============
server.1=hd01:2888:3888
server.2=hd02:2888:3888
server.3=hd03:2888:3888

# ZOOKEEPER ENVIRONMENT
export ZOOKEEPER_HOME=/opt/soft/zk345
export PATH=$PATH:$ZOOKEEPER_HOME/bin

# HADOOP ENVIRONMENT
export HADOOP_HOME=/opt/soft/hadoop260
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin
export HADOOP_INSTALL=$HADOOP_HOME

# ============ host preparation ============
systemctl restart network
systemctl stop firewalld
systemctl disable firewalld

# ============ yarn-site.xml ============
yarn.resourcemanager.ha.enabled = true
yarn.resourcemanager.cluster-id = hayarn
yarn.resourcemanager.ha.rm-ids = rm1,rm2
yarn.resourcemanager.hostname.rm1 = hd02
yarn.resourcemanager.hostname.rm2 = hd03
yarn.resourcemanager.zk-address = hd01:2181,hd02:2181,hd03:2181
yarn.resourcemanager.recovery.enabled = true
yarn.resourcemanager.store.class = org.apache.hadoop.yarn.server.resourcemanager.recovery.ZKRMStateStore
yarn.resourcemanager.hostname = hd03
yarn.nodemanager.aux-services = mapreduce_shuffle
yarn.log-aggregation-enable = true
yarn.log-aggregation.retain-seconds = 604800



