Configure the hostname, network, and name resolution on all three hosts
#master
hostnamectl set-hostname master
bash
#slave1
hostnamectl set-hostname slave1
bash
#slave2
hostnamectl set-hostname slave2
bash
#master
nmcli con modify <connection-name> ipv4.addresses 10.10.10.128/24 ipv4.gateway 10.10.10.2 ipv4.dns 8.8.8.8 ipv4.method manual
nmcli con reload
nmcli con up <connection-name>
#slave1
nmcli con modify <connection-name> ipv4.addresses 10.10.10.129/24 ipv4.gateway 10.10.10.2 ipv4.dns 8.8.8.8 ipv4.method manual
nmcli con reload
nmcli con up <connection-name>
#slave2
nmcli con modify <connection-name> ipv4.addresses 10.10.10.130/24 ipv4.gateway 10.10.10.2 ipv4.dns 8.8.8.8 ipv4.method manual
nmcli con reload
nmcli con up <connection-name>
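Optionally, a quick sanity check that the static addressing took effect on each host (the connection name below is the same placeholder as above):
#all three hosts
ip -4 addr show
nmcli con show <connection-name>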
#master
vi /etc/hosts
10.10.10.128 master
10.10.10.129 slave1
10.10.10.130 slave2
#slave1
vi /etc/hosts
10.10.10.128 master
10.10.10.129 slave1
10.10.10.130 slave2
#slave2
vi /etc/hosts
10.10.10.128 master
10.10.10.129 slave1
10.10.10.130 slave2
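With /etc/hosts in place on all three machines, name resolution can be verified from any host, for example:
#any host
getent hosts master slave1 slave2
ping -c 2 slave1
ping -c 2 slave2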
Disable the firewall and SELinux
#master
systemctl disable --now firewalld
vi /etc/selinux/config
SELINUX=disabled
setenforce 0
#slave1
systemctl disable --now firewalld
vi /etc/selinux/config
SELINUX=disabled
setenforce 0
#slave2
systemctl disable --now firewalld
vi /etc/selinux/config
SELINUX=disabled
setenforce 0
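A quick check that the firewall is stopped and SELinux is permissive for the current boot (it only shows Disabled after a reboot):
#any host
systemctl is-active firewalld
getenforce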
Install Hadoop
First, upload the JDK and Hadoop tarballs to the host with Xftp (or any SFTP tool).
#master
tar -zxvf /opt/software/jdk-8u152-linux-x64.tar.gz -C /usr/local/src/
tar -zxvf /opt/software/hadoop-2.7.1.tar.gz -C /usr/local/src/
cd /usr/local/src/
mv jdk1.8.0_152 jdk
mv hadoop-2.7.1 hadoop
vi /etc/profile.d/hadoop.sh
export JAVA_HOME=/usr/local/src/jdk
export HADOOP_HOME=/usr/local/src/hadoop
export PATH=${JAVA_HOME}/bin:${HADOOP_HOME}/bin:${HADOOP_HOME}/sbin:$PATH
source /etc/profile.d/hadoop.sh
echo $PATH
vi /usr/local/src/hadoop/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/usr/local/src/jdk
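After sourcing the profile script (or opening a new login shell), the JDK and Hadoop installations can be sanity-checked, for example:
#master
java -version
hadoop version
echo $JAVA_HOME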
Configure the Hadoop configuration files
#master
vi /usr/local/src/hadoop/etc/hadoop/hdfs-site.xml
<configuration>
<property>
<name>dfs.namenode.name.dir</name>
<value>file:/usr/local/src/hadoop/dfs/name</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>file:/usr/local/src/hadoop/dfs/data</value>
</property>
<property>
<name>dfs.replication</name>
<value>2</value>
</property>
</configuration>
mkdir -p /usr/local/src/hadoop/dfs/{name,data}
vi /usr/local/src/hadoop/etc/hadoop/core-site.xml
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://master:9000</value>
</property>
<property>
<name>io.file.buffer.size</name>
<value>131072</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>file:/usr/local/src/hadoop/tmp</value>
</property>
</configuration>
mkdir -p /usr/local/src/hadoop/tmp
cd /usr/local/src/hadoop/etc/hadoop
cp mapred-site.xml.template mapred-site.xml
vi /usr/local/src/hadoop/etc/hadoop/mapred-site.xml
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
<property>
<name>mapreduce.jobhistory.address</name>
<value>master:10020</value>
</property>
<property>
<name>mapreduce.jobhistory.webapp.address</name>
<value>master:19888</value>
</property>
</configuration>
vi /usr/local/src/hadoop/etc/hadoop/yarn-site.xml
<configuration>
<property>
<name>yarn.resourcemanager.address</name>
<value>master:8032</value>
</property>
<property>
<name>yarn.resourcemanager.scheduler.address</name>
<value>master:8030</value>
</property>
<property>
<name>yarn.resourcemanager.webapp.address</name>
<value>master:8088</value>
</property>
<property>
<name>yarn.resourcemanager.resource-tracker.address</name>
<value>master:8031</value>
</property>
<property>
<name>yarn.resourcemanager.admin.address</name>
<value>master:8033</value>
</property>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.nodemanager.aux-services.mapreduce_shuffle.class</name>
<value>org.apache.hadoop.mapred.ShuffleHandler</value>
</property>
</configuration>
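As an optional check that the XML files are well-formed and actually picked up, hdfs getconf can read individual keys back; this only reads the configuration and does not require a running cluster:
#master
hdfs getconf -confKey fs.defaultFS
hdfs getconf -confKey dfs.replication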
Other Hadoop configuration
#master
vi /usr/local/src/hadoop/etc/hadoop/masters
10.10.10.128
vi /usr/local/src/hadoop/etc/hadoop/slaves
10.10.10.129
10.10.10.130
useradd hadoop
echo 'hadoop' | passwd --stdin hadoop
chown -R hadoop:hadoop /usr/local/src
ll /usr/local/src/
ssh-keygen -t rsa
ssh-copy-id root@slave1
ssh-copy-id root@slave2
scp -r /usr/local/src/* root@slave1:/usr/local/src/
scp -r /usr/local/src/* root@slave2:/usr/local/src/
scp /etc/profile.d/hadoop.sh root@slave1:/etc/profile.d/
scp /etc/profile.d/hadoop.sh root@slave2:/etc/profile.d/
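If ssh-copy-id succeeded, the scp commands above run without a password prompt; a quick check from master that the keys and the copied files are in place (paths match the scp targets above):
#master
ssh root@slave1 hostname
ssh root@slave2 'ls /usr/local/src'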
#slave1
useradd hadoop
echo 'hadoop' | passwd --stdin hadoop
chown -R hadoop:hadoop /usr/local/src
ll /usr/local/src/
source /etc/profile.d/hadoop.sh
echo $PATH
#slave2
useradd hadoop
echo 'hadoop' | passwd --stdin hadoop
chown -R hadoop:hadoop /usr/local/src
ll /usr/local/src/
source /etc/profile.d/hadoop.sh
echo $PATH
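Finally, a cross-node check from master, assuming the profile script was distributed as above; the script is sourced explicitly because a non-interactive ssh command does not read /etc/profile.d:
#master
ssh root@slave1 'source /etc/profile.d/hadoop.sh && hadoop version'
ssh root@slave2 'source /etc/profile.d/hadoop.sh && hadoop version'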