CentOS 7 安装Hadoop 2.7.1
两台机器 CentOS7(机器名分别为master-CentOS7、slave-CentOS7) 内存2G (笔记本开虚拟机快撑不住了╮(╯-╰)╭
CentOS7 与 CentOS6 有一些区别
网络配置
master-CentOS7
[root@localhost ~]# vi /etc/sysconfig/network-scripts/ifcfg-eno16777736 TYPE=Ethernet BOOTPROTO=static DEFROUTE=yes PEERDNS=yes PEERROUTES=yes IPV4_FAILURE_FATAL=no IPV6INIT=yes IPV6_AUTOCONF=yes IPV6_DEFROUTE=yes IPV6_PEERDNS=yes IPV6_PEERROUTES=yes IPV6_FAILURE_FATAL=no NAME=eno16777736 UUID=b30f5765-ecd7-4dba-a0ed-ebac92c836bd DEVICE=eno16777736 ONBOOT=yes IPADDR=192.168.1.182 NETMASK=255.255.255.0 GATEWAY=192.168.1.1 DNS1=114.114.114.114 DNS2=8.8.4.4 网络信息根据自己实际的网络情况配置。 [root@localhost ~]# systemctl restart network [root@localhost ~]# ifconfig
slave-CentOS7
[root@localhost ~]# vi /etc/sysconfig/network-scripts/ifcfg-eno16777736 TYPE=Ethernet BOOTPROTO=static DEFROUTE=yes PEERDNS=yes PEERROUTES=yes IPV4_FAILURE_FATAL=no IPV6INIT=yes IPV6_AUTOCONF=yes IPV6_DEFROUTE=yes IPV6_PEERDNS=yes IPV6_PEERROUTES=yes IPV6_FAILURE_FATAL=no NAME=eno16777736 UUID=b30f5765-ecd7-4dba-a0ed-ebac92c836bd DEVICE=eno16777736 ONBOOT=yes IPADDR=192.168.1.183 NETMASK=255.255.255.0 GATEWAY=192.168.1.1 DNS1=114.114.114.114 DNS2=8.8.4.4 网络信息根据自己实际的网络情况配置。 [root@localhost ~]# systemctl restart network [root@localhost ~]# ifconfig
设置hosts、hostname
master-CentOS7
[root@localhost ~]# vi /etc/hosts 添加 192.168.1.182 master 192.168.1.183 slave [root@localhost ~]# vi /etc/hostname localhost.localdomain 内容修改为 master
slave-CentOS7
[root@localhost ~]# vi /etc/hosts 添加 192.168.1.182 master 192.168.1.183 slave [root@localhost ~]# vi /etc/hostname localhost.localdomain 内容修改为 slave
关闭selinux
master-CentOS7
[root@master ~]# getenforce Enforcing [root@master ~]# vi /etc/selinux/config SELINUX=enforcing 修改为 SELINUX=disabled 保存重启 [root@master ~]# getenforce Disabled
slave-CentOS7
[root@slave ~]# getenforce Enforcing [root@slave ~]# vi /etc/selinux/config SELINUX=enforcing 修改为 SELINUX=disabled 保存重启 [root@slave ~]# getenforce Disabled
关闭firewalld
master-CentOS7
[root@master ~]# systemctl disable firewalld Removed symlink /etc/systemd/system/dbus-org.fedoraproject.FirewallD1.service. Removed symlink /etc/systemd/system/basic.target.wants/firewalld.service. [root@master ~]# systemctl stop firewalld [root@master ~]# iptables -nvL Chain INPUT (policy ACCEPT 0 packets, 0 bytes) pkts bytes target prot opt in out source destination Chain FORWARD (policy ACCEPT 0 packets, 0 bytes) pkts bytes target prot opt in out source destination Chain OUTPUT (policy ACCEPT 0 packets, 0 bytes) pkts bytes target prot opt in out source destination [root@master ~]# yum install -y iptables-services [root@master ~]# service iptables save iptables: Saving firewall rules to /etc/sysconfig/iptables:[ 确定 ] [root@master ~]# systemctl enable iptables Created symlink from /etc/systemd/system/basic.target.wants/iptables.service to /usr/lib/systemd/system/iptables.service.
slave-CentOS7
[root@slave ~]# systemctl disable firewalld Removed symlink /etc/systemd/system/dbus-org.fedoraproject.FirewallD1.service. Removed symlink /etc/systemd/system/basic.target.wants/firewalld.service. [root@slave ~]# systemctl stop firewalld [root@slave ~]# iptables -nvL Chain INPUT (policy ACCEPT 0 packets, 0 bytes) pkts bytes target prot opt in out source destination Chain FORWARD (policy ACCEPT 0 packets, 0 bytes) pkts bytes target prot opt in out source destination Chain OUTPUT (policy ACCEPT 0 packets, 0 bytes) pkts bytes target prot opt in out source destination [root@slave ~]# yum install -y iptables-services [root@slave ~]# service iptables save iptables: Saving firewall rules to /etc/sysconfig/iptables:[ 确定 ] [root@slave ~]# systemctl enable iptables Created symlink from /etc/systemd/system/basic.target.wants/iptables.service to /usr/lib/systemd/system/iptables.service.
密钥登陆
master-CentOS7
[root@master ~]# ssh-keygen 一直回车 [root@master ~]# cat .ssh/id_rsa.pub 复制~/.ssh/id_rsa.pub 内容
slave-CentOS7
[root@slave ~]# vi .ssh/authorized_keys 复制~/.ssh/id_rsa.pub 内容到 ~/.ssh/authorized_keys时报错 ".ssh/authorized_keys" E212: Can't open file for writing 解决方案 [root@slave ~]# ls -ld .ssh ls: 无法访问.ssh: 没有那个文件或目录 [root@slave ~]# mkdir .ssh; chmod 700 .ssh [root@slave ~]# ls -ld .ssh drwx------ 2 root root 6 8月 28 15:59 .ssh [root@slave ~]# vi .ssh/authorized_keys 复制~/.ssh/id_rsa.pub 内容到 ~/.ssh/authorized_keys [root@slave ~]# ls -l !$ ls -l .ssh/authorized_keys -rw-r--r-- 1 root root 418 8月 28 16:02 .ssh/authorized_keys
master-CentOS7
[root@master ~]# vi .ssh/authorized_keys 复制~/.ssh/id_rsa.pub 内容到 ~/.ssh/authorized_keys
测试
master-CentOS7
[root@master ~]# ssh master [root@master ~]# exit [root@master ~]# ssh slave [root@slave ~]# exit
安装JDK
Hadoop2.7 需要安装jdk1.7版本,下载地址http://www.oracle.com/technetwork/java/javase/downloads/jdk7-downloads-1880260.html
先卸载CentOS7自带的JDK
以slave-CentOS7为例(master-CentOS7、slave-CentOS7上都需要卸载CentOS7自带的JDK)
[root@slave ~]# java -version openjdk version "1.8.0_101" OpenJDK Runtime Environment (build 1.8.0_101-b13) OpenJDK 64-Bit Server VM (build 25.101-b13, mixed mode) [root@slave ~]# rpm -qa |grep jdk java-1.7.0-openjdk-headless-1.7.0.111-2.6.7.2.el7_2.x86_64 java-1.8.0-openjdk-1.8.0.101-3.b13.el7_2.x86_64 java-1.8.0-openjdk-headless-1.8.0.101-3.b13.el7_2.x86_64 java-1.7.0-openjdk-1.7.0.111-2.6.7.2.el7_2.x86_64 [root@slave ~]# yum -y remove java-1.7.0-openjdk-headless-1.7.0.111-2.6.7.2.el7_2.x86_64 [root@slave ~]# yum -y remove java-1.8.0-openjdk-1.8.0.101-3.b13.el7_2.x86_64 [root@slave ~]# yum -y remove java-1.8.0-openjdk-headless-1.8.0.101-3.b13.el7_2.x86_64 [root@slave ~]# java -version -bash: /usr/bin/java: 没有那个文件或目录
master-CentOS7
[root@master ~]# wget 'http://download.oracle.com/otn-pub/java/jdk/7u79-b15/jdk-7u79-linux-x64.tar.gz?AuthParam=1472372876_f3205a608139acb432d3c48638502428' [root@master ~]# mv jdk-7u79-linux-x64.tar.gz\?AuthParam\=1472372876_f3205a608139acb432d3c48638502428 jdk-7u79-linux-x64.tar.gz [root@master ~]# tar zxvf jdk-7u79-linux-x64.tar.gz [root@master ~]# mv jdk1.7.0_79 /usr/local/ [root@master ~]# vi /etc/profile.d/java.sh 添加 export JAVA_HOME=/usr/local/jdk1.7.0_79 export CLASSPATH=.:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar export PATH=$PATH:$JAVA_HOME/bin [root@master ~]# source !$ source /etc/profile.d/java.sh [root@master ~]# java -version java version "1.7.0_79" Java(TM) SE Runtime Environment (build 1.7.0_79-b15) Java HotSpot(TM) 64-Bit Server VM (build 24.79-b02, mixed mode) [root@master ~]# scp jdk-7u79-linux-x64.tar.gz slave:/root/ [root@master ~]# scp /etc/profile.d/java.sh slave:/etc/profile.d/
slave-CentOS7
[root@slave ~]# tar zxvf jdk-7u79-linux-x64.tar.gz [root@slave ~]# mv jdk1.7.0_79 /usr/local/ [root@slave ~]# source /etc/profile.d/java.sh [root@slave ~]# java -version java version "1.7.0_79" Java(TM) SE Runtime Environment (build 1.7.0_79-b15) Java HotSpot(TM) 64-Bit Server VM (build 24.79-b02, mixed mode)
安装Hadoop
master-CentOS7
[root@master ~]# wget 'http://mirror.bit.edu.cn/apache/hadoop/common/hadoop-2.7.1/hadoop-2.7.1.tar.gz' [root@master ~]# tar zxvf hadoop-2.7.1.tar.gz [root@master ~]# mv hadoop-2.7.1 /usr/local/hadoop [root@master ~]# ls !$ ls /usr/local/hadoop bin include libexec NOTICE.txt sbin etc lib LICENSE.txt README.txt share [root@master ~]# mkdir /usr/local/hadoop/tmp /usr/local/hadoop/dfs /usr/local/hadoop/dfs/data /usr/local/hadoop/dfs/name [root@master ~]# ls /usr/local/hadoop bin dfs etc include lib libexec LICENSE.txt NOTICE.txt README.txt sbin share tmp [root@master ~]# rsync -av /usr/local/hadoop slave:/usr/local
slave-CentOS7
[root@slave ~]# ls /usr/local/hadoop bin etc lib LICENSE.txt README.txt share dfs include libexec NOTICE.txt sbin tmp
配置Hadoop
master-CentOS7
[root@master ~]# vi /usr/local/hadoop/etc/hadoop/core-site.xml 添加 <configuration> <property> <name>fs.defaultFS</name> <value>hdfs://192.168.1.182:9000</value> </property> <property> <name>hadoop.tmp.dir</name> <value>file:/usr/local/hadoop/tmp</value> </property> <property> <name>io.file.buffer.size</name> <value>131702</value> </property> </configuration> 注意master-CentOS7主机的IP [root@master ~]# vi /usr/local/hadoop/etc/hadoop/hdfs-site.xml 添加 <configuration> <property> <name>dfs.namenode.name.dir</name> <value>file:/usr/local/hadoop/dfs/name</value> </property> <property> <name>dfs.datanode.data.dir</name> <value>file:/usr/local/hadoop/dfs/data</value> </property> <property> <name>dfs.replication</name> <value>2</value> </property> <property> <name>dfs.namenode.secondary.http-address</name> <value>192.168.1.182:9001</value> </property> <property> <name>dfs.webhdfs.enabled</name> <value>true</value> </property> </configuration> 注意master-CentOS7主机的IP [root@master ~]# mv /usr/local/hadoop/etc/hadoop/mapred-site.xml.template /usr/local/hadoop/etc/hadoop/mapred-site.xml [root@master ~]# vi /usr/local/hadoop/etc/hadoop/mapred-site.xml 添加 <configuration> <property> <name>mapreduce.framework.name</name> <value>yarn</value> </property> <property> <name>mapreduce.jobhistory.address</name> <value>192.168.1.182:10020</value> </property> <property> <name>mapreduce.jobhistory.webapp.address</name> <value>192.168.1.182:19888</value> </property> </configuration> 注意master-CentOS7主机的IP [root@master ~]# vi /usr/local/hadoop/etc/hadoop/yarn-site.xml 添加 <configuration> <!-- Site specific YARN configuration properties --> <property> <name>yarn.nodemanager.aux-services</name> <value>mapreduce_shuffle</value> </property> <property> <name>yarn.nodemanager.auxservices.mapreduce.shuffle.class</name> <value>org.apache.hadoop.mapred.ShuffleHandler</value> </property> <property> <name>yarn.resourcemanager.address</name> <value>192.168.1.182:8032</value> </property> <property> 
<name>yarn.resourcemanager.scheduler.address</name> <value>192.168.1.182:8030</value> </property> <property> <name>yarn.resourcemanager.resource-tracker.address</name> <value>192.168.1.182:8031</value> </property> <property> <name>yarn.resourcemanager.admin.address</name> <value>192.168.1.182:8033</value> </property> <property> <name>yarn.resourcemanager.webapp.address</name> <value>192.168.1.182:8088</value> </property> <property> <name>yarn.nodemanager.resource.memory-mb</name> <value>2048</value> </property> </configuration> 注意master-CentOS7主机的IP [root@master ~]# cd /usr/local/hadoop/etc/hadoop [root@master hadoop]# vi hadoop-env.sh 更改 export JAVA_HOME=/usr/local/jdk1.7.0_79 [root@master hadoop]# vi yarn-env.sh 更改 export JAVA_HOME=/usr/local/jdk1.7.0_79 [root@master hadoop]# vi slaves 更改为 192.168.1.183 注意slave-CentOS7的IP [root@master hadoop]# rsync -av /usr/local/hadoop/etc/ slave:/usr/local/hadoop/etc/
slave-CentOS7
[root@slave ~]# cd /usr/local/hadoop/etc/hadoop/ [root@slave hadoop]# cat slaves 192.168.1.183 检查slave没问题
启动Hadoop
master-CentOS7
[root@master hadoop]# /usr/local/hadoop/bin/hdfs namenode -format [root@master hadoop]# echo $? 0 [root@master hadoop]# /usr/local/hadoop/sbin/start-all.sh [root@master hadoop]# jps 19907 ResourceManager 19604 SecondaryNameNode 19268 NameNode 20323 Jps
slave-CentOS7
[root@slave hadoop]# jps 18113 NodeManager 18509 Jps 17849 DataNode
浏览器打开
http://192.168.1.182:8088/
http://192.168.1.182:50070/
测试Hadoop
master-CentOS7
[root@master hadoop]# cd /usr/local/hadoop/ [root@master hadoop]# bin/hadoop jar ./share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.1.jar pi 10 10
停止服务
master-CentOS7
相关推荐
changecan 2020-11-19
ECSHOP专属建设 2020-11-13
88427810 2020-11-02
步知道 2020-10-27
Junzizhiai 2020-10-10
jackalwb 2020-10-05
小网管 2020-09-03
shiwenqiang 2020-09-14
85251846 2020-09-14
summerinsist 2020-08-21
87901735 2020-08-19
PinkBean 2020-08-11
85407718 2020-08-09
MichelinMessi 2020-07-30
suosuo 2020-07-28
CARBON 2020-07-28
lizhengfa 2020-07-27
88427810 2020-07-26