
Installing Hadoop 2.7.1 on CentOS 7

Date: 2017/3/6 9:13:34



Two CentOS 7 machines (hostnames master-CentOS7 and slave-CentOS7), 2 GB of RAM each (my laptop can barely cope with running both VMs ╮(╯-╰)╭).
CentOS 7 works a little differently from CentOS 6; the differences that matter for this tutorial are summarized below.
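
A quick reference (commands I am confident apply to stock CentOS 7; the CentOS 6 equivalents are shown in the comments):

# services are managed by systemd instead of SysV init scripts
systemctl start sshd              # CentOS 6: service sshd start
systemctl enable sshd             # CentOS 6: chkconfig sshd on
# the hostname has its own tool and file (/etc/hostname)
hostnamectl set-hostname master   # CentOS 6: edit /etc/sysconfig/network
# the default firewall is firewalld rather than the iptables service,
# and network interfaces get names like eno16777736 instead of eth0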

Network configuration

master-CentOS7

[root@localhost ~]# vi /etc/sysconfig/network-scripts/ifcfg-eno16777736
TYPE=Ethernet
BOOTPROTO=static
DEFROUTE=yes
PEERDNS=yes
PEERROUTES=yes
IPV4_FAILURE_FATAL=no
IPV6INIT=yes
IPV6_AUTOCONF=yes
IPV6_DEFROUTE=yes
IPV6_PEERDNS=yes
IPV6_PEERROUTES=yes
IPV6_FAILURE_FATAL=no
NAME=eno16777736
UUID=b30f5765-ecd7-4dba-a0ed-ebac92c836bd
DEVICE=eno16777736
ONBOOT=yes
IPADDR=192.168.1.182
NETMASK=255.255.255.0
GATEWAY=192.168.1.1
DNS1=114.114.114.114
DNS2=8.8.4.4

Adjust the addresses to match your own network.

[root@localhost ~]# systemctl restart network
[root@localhost ~]# ifconfig

slave-CentOS7

[root@localhost ~]# vi /etc/sysconfig/network-scripts/ifcfg-eno16777736
TYPE=Ethernet
BOOTPROTO=static
DEFROUTE=yes
PEERDNS=yes
PEERROUTES=yes
IPV4_FAILURE_FATAL=no
IPV6INIT=yes
IPV6_AUTOCONF=yes
IPV6_DEFROUTE=yes
IPV6_PEERDNS=yes
IPV6_PEERROUTES=yes
IPV6_FAILURE_FATAL=no
NAME=eno16777736
UUID=b30f5765-ecd7-4dba-a0ed-ebac92c836bd
DEVICE=eno16777736
ONBOOT=yes
IPADDR=192.168.1.183
NETMASK=255.255.255.0
GATEWAY=192.168.1.1
DNS1=114.114.114.114
DNS2=8.8.4.4

Adjust the addresses to match your own network.

[root@localhost ~]# systemctl restart network
[root@localhost ~]# ifconfig
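
As an alternative to editing the ifcfg file by hand, the same static address can be applied with nmcli (part of NetworkManager on CentOS 7). A minimal sketch for the master, assuming the connection is still named eno16777736 and a NetworkManager version new enough to accept the ipv4.gateway property (on older builds the gateway is appended to ipv4.addresses instead); use 192.168.1.183 on the slave:

nmcli con show                          # confirm the connection name first
nmcli con mod eno16777736 ipv4.method manual \
    ipv4.addresses 192.168.1.182/24 \
    ipv4.gateway 192.168.1.1 \
    ipv4.dns "114.114.114.114 8.8.4.4"
nmcli con up eno16777736                # re-activate with the new settings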

Set hosts and hostname

master-CentOS7

[root@localhost ~]# vi /etc/hosts

Add:

192.168.1.182 master
192.168.1.183 slave

[root@localhost ~]# vi /etc/hostname

Change the content from localhost.localdomain to master.

slave-CentOS7

[root@localhost ~]# vi /etc/hosts

Add:

192.168.1.182 master
192.168.1.183 slave

[root@localhost ~]# vi /etc/hostname

Change the content from localhost.localdomain to slave.
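
On CentOS 7 the hostname can also be set without editing /etc/hostname directly; hostnamectl writes the file and applies the change immediately:

hostnamectl set-hostname master   # run on master-CentOS7
hostnamectl set-hostname slave    # run on slave-CentOS7
hostnamectl status                # verify (or simply: hostname)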

Disable SELinux

master-CentOS7

[root@master ~]# getenforce
Enforcing
[root@master ~]# vi /etc/selinux/config

Change SELINUX=enforcing to SELINUX=disabled, save, and reboot.

[root@master ~]# getenforce
Disabled

slave-CentOS7

[root@slave ~]# getenforce
Enforcing
[root@slave ~]# vi /etc/selinux/config

Change SELINUX=enforcing to SELINUX=disabled, save, and reboot.

[root@slave ~]# getenforce
Disabled
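
If you prefer not to reboot right away, SELinux can be relaxed for the running system and the config edited in one go (setenforce only lasts until the next boot, so the file change is still needed):

setenforce 0                      # current boot: Enforcing -> Permissive
getenforce                        # now reports Permissive
sed -i 's/^SELINUX=enforcing/SELINUX=disabled/' /etc/selinux/config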

Disable firewalld

master-CentOS7

[root@master ~]# systemctl disable firewalld
Removed symlink /etc/systemd/system/dbus-org.fedoraproject.FirewallD1.service.
Removed symlink /etc/systemd/system/basic.target.wants/firewalld.service.
[root@master ~]# systemctl stop firewalld
[root@master ~]# iptables -nvL
Chain INPUT (policy ACCEPT 0 packets, 0 bytes)
 pkts bytes target     prot opt in     out     source               destination
Chain FORWARD (policy ACCEPT 0 packets, 0 bytes)
 pkts bytes target     prot opt in     out     source               destination
Chain OUTPUT (policy ACCEPT 0 packets, 0 bytes)
 pkts bytes target     prot opt in     out     source               destination
[root@master ~]# yum install -y iptables-services
[root@master ~]# service iptables save
iptables: Saving firewall rules to /etc/sysconfig/iptables:[  OK  ]
[root@master ~]# systemctl enable iptables
Created symlink from /etc/systemd/system/basic.target.wants/iptables.service to /usr/lib/systemd/system/iptables.service.

slave-CentOS7

[root@slave ~]# systemctl disable firewalld
Removed symlink /etc/systemd/system/dbus-org.fedoraproject.FirewallD1.service.
Removed symlink /etc/systemd/system/basic.target.wants/firewalld.service.
[root@slave ~]# systemctl stop firewalld
[root@slave ~]# iptables -nvL
Chain INPUT (policy ACCEPT 0 packets, 0 bytes)
 pkts bytes target     prot opt in     out     source               destination
Chain FORWARD (policy ACCEPT 0 packets, 0 bytes)
 pkts bytes target     prot opt in     out     source               destination
Chain OUTPUT (policy ACCEPT 0 packets, 0 bytes)
 pkts bytes target     prot opt in     out     source               destination
[root@slave ~]# yum install -y iptables-services
[root@slave ~]# service iptables save
iptables: Saving firewall rules to /etc/sysconfig/iptables:[  OK  ]
[root@slave ~]# systemctl enable iptables
Created symlink from /etc/systemd/system/basic.target.wants/iptables.service to /usr/lib/systemd/system/iptables.service.
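
Turning the firewall off is the simplest option for a lab setup. If you would rather keep firewalld running, a sketch of the alternative is to open only the ports used later in this article on both machines; the DataNode ports in the last line (50010/50020/50075) are the usual Hadoop 2.x defaults and are an assumption here:

firewall-cmd --permanent --add-port=9000/tcp --add-port=9001/tcp       # HDFS RPC / secondary NN
firewall-cmd --permanent --add-port=8030-8033/tcp --add-port=8088/tcp  # YARN ResourceManager
firewall-cmd --permanent --add-port=10020/tcp --add-port=19888/tcp     # MapReduce JobHistory
firewall-cmd --permanent --add-port=50070/tcp --add-port=50010/tcp --add-port=50020/tcp --add-port=50075/tcp
firewall-cmd --reload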

Passwordless SSH login

master-CentOS7

[root@master ~]# ssh-keygen

Press Enter at every prompt.

[root@master ~]# cat .ssh/id_rsa.pub

Copy the contents of ~/.ssh/id_rsa.pub.

slave-CentOS7

[root@slave ~]# vi .ssh/authorized_keys

Pasting the contents of ~/.ssh/id_rsa.pub into ~/.ssh/authorized_keys failed with:

".ssh/authorized_keys" E212: Can't open file for writing

Fix:

[root@slave ~]# ls -ld .ssh
ls: cannot access .ssh: No such file or directory
[root@slave ~]# mkdir .ssh; chmod 700 .ssh
[root@slave ~]# ls -ld .ssh
drwx------ 2 root root 6 Aug 28 15:59 .ssh
[root@slave ~]# vi .ssh/authorized_keys

Paste the contents of ~/.ssh/id_rsa.pub into ~/.ssh/authorized_keys.

[root@slave ~]# ls -l !$
ls -l .ssh/authorized_keys
-rw-r--r-- 1 root root 418 Aug 28 16:02 .ssh/authorized_keys

master-CentOS7

[root@master ~]# vi .ssh/authorized_keys

Paste the contents of ~/.ssh/id_rsa.pub into ~/.ssh/authorized_keys.
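
The manual copy above works, but ssh-copy-id does the same job in one step: it creates ~/.ssh on the target if needed, fixes the permissions, and appends the public key to authorized_keys. A sketch run from master-CentOS7:

ssh-copy-id root@slave    # push the key to slave-CentOS7
ssh-copy-id root@master   # authorize the key for logins back to master itself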

Test

master-CentOS7

[root@master ~]# ssh master
[root@master ~]# exit
[root@master ~]# ssh slave
[root@slave ~]# exit

Install the JDK

Hadoop 2.7 requires JDK 1.7; download it from http://www.oracle.com/technetwork/java/javase/downloads/jdk7-downloads-1880260.html

First uninstall the OpenJDK that ships with CentOS 7. The commands below use slave-CentOS7 as the example, but the bundled JDK must be removed on both master-CentOS7 and slave-CentOS7.

[root@slave ~]# java -version
openjdk version "1.8.0_101"
OpenJDK Runtime Environment (build 1.8.0_101-b13)
OpenJDK 64-Bit Server VM (build 25.101-b13, mixed mode)
[root@slave ~]# rpm -qa | grep jdk
java-1.7.0-openjdk-headless-1.7.0.111-2.6.7.2.el7_2.x86_64
java-1.8.0-openjdk-1.8.0.101-3.b13.el7_2.x86_64
java-1.8.0-openjdk-headless-1.8.0.101-3.b13.el7_2.x86_64
java-1.7.0-openjdk-1.7.0.111-2.6.7.2.el7_2.x86_64
[root@slave ~]# yum -y remove java-1.7.0-openjdk-headless-1.7.0.111-2.6.7.2.el7_2.x86_64
[root@slave ~]# yum -y remove java-1.8.0-openjdk-1.8.0.101-3.b13.el7_2.x86_64
[root@slave ~]# yum -y remove java-1.8.0-openjdk-headless-1.8.0.101-3.b13.el7_2.x86_64
[root@slave ~]# java -version
-bash: /usr/bin/java: No such file or directory
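
The per-package removal above can also be collapsed into a single command with wildcards, which catches every installed OpenJDK build at once (a sketch; run it on both machines):

rpm -qa | grep -i openjdk                                    # see what is installed
yum -y remove 'java-1.7.0-openjdk*' 'java-1.8.0-openjdk*'    # remove all of it
java -version                                                # should now fail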

master-CentOS7

[root@master ~]# wget 'http://download.oracle.com/otn-pub/java/jdk/7u79-b15/jdk-7u79-linux-x64.tar.gz?AuthParam=1472372876_f3205a608139acb432d3c48638502428'
[root@master ~]# mv jdk-7u79-linux-x64.tar.gz\?AuthParam\=1472372876_f3205a608139acb432d3c48638502428 jdk-7u79-linux-x64.tar.gz
[root@master ~]# tar zxvf jdk-7u79-linux-x64.tar.gz
[root@master ~]# mv jdk1.7.0_79 /usr/local/
[root@master ~]# vi /etc/profile.d/java.sh

Add:

export JAVA_HOME=/usr/local/jdk1.7.0_79
export CLASSPATH=.:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$PATH:$JAVA_HOME/bin

[root@master ~]# source !$
source /etc/profile.d/java.sh
[root@master ~]# java -version
java version "1.7.0_79"
Java(TM) SE Runtime Environment (build 1.7.0_79-b15)
Java HotSpot(TM) 64-Bit Server VM (build 24.79-b02, mixed mode)
[root@master ~]# scp jdk-7u79-linux-x64.tar.gz slave:/root/
[root@master ~]# scp /etc/profile.d/java.sh slave:/etc/profile.d/

slave-CentOS7

[root@slave ~]# tar zxvf jdk-7u79-linux-x64.tar.gz
[root@slave ~]# mv jdk1.7.0_79 /usr/local/
[root@slave ~]# source /etc/profile.d/java.sh
[root@slave ~]# java -version
java version "1.7.0_79"
Java(TM) SE Runtime Environment (build 1.7.0_79-b15)
Java HotSpot(TM) 64-Bit Server VM (build 24.79-b02, mixed mode)
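
A quick sanity check on both nodes confirms the environment file took effect:

echo $JAVA_HOME    # expect /usr/local/jdk1.7.0_79
which java         # expect /usr/local/jdk1.7.0_79/bin/java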

Install Hadoop

master-CentOS7

[root@master ~]# wget 'http://mirror.bit.edu.cn/apache/hadoop/common/hadoop-2.7.1/hadoop-2.7.1.tar.gz'
[root@master ~]# tar zxvf hadoop-2.7.1.tar.gz
[root@master ~]# mv hadoop-2.7.1 /usr/local/hadoop
[root@master ~]# ls !$
ls /usr/local/hadoop
bin  include  libexec      NOTICE.txt  sbin
etc  lib      LICENSE.txt  README.txt  share
[root@master ~]# mkdir /usr/local/hadoop/tmp /usr/local/hadoop/dfs /usr/local/hadoop/dfs/data /usr/local/hadoop/dfs/name
[root@master ~]# ls /usr/local/hadoop
bin  dfs  etc  include  lib  libexec  LICENSE.txt  NOTICE.txt  README.txt  sbin  share  tmp
[root@master ~]# rsync -av /usr/local/hadoop slave:/usr/local
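
Optionally, the Hadoop commands can be put on PATH the same way java.sh was handled, so hadoop, hdfs and yarn work without the full /usr/local/hadoop prefix. This is not done in the article (the remaining steps keep using full paths), and /etc/profile.d/hadoop.sh is just a name chosen for this sketch:

cat > /etc/profile.d/hadoop.sh <<'EOF'
export HADOOP_HOME=/usr/local/hadoop
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
EOF
source /etc/profile.d/hadoop.sh
scp /etc/profile.d/hadoop.sh slave:/etc/profile.d/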

slave-CentOS7

[root@slave ~]# ls /usr/local/hadoop
bin  etc      lib      LICENSE.txt  README.txt  share
dfs  include  libexec  NOTICE.txt   sbin        tmp

Configure Hadoop

master-CentOS7

[root@master ~]# vi /usr/local/hadoop/etc/hadoop/core-site.xml

Add (note: use the IP of the master-CentOS7 host):

<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://192.168.1.182:9000</value>
    </property>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>file:/usr/local/hadoop/tmp</value>
    </property>
    <property>
        <name>io.file.buffer.size</name>
        <value>131702</value>
    </property>
</configuration>

[root@master ~]# vi /usr/local/hadoop/etc/hadoop/hdfs-site.xml

Add (note: use the IP of the master-CentOS7 host):

<configuration>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:/usr/local/hadoop/dfs/name</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:/usr/local/hadoop/dfs/data</value>
    </property>
    <property>
        <name>dfs.replication</name>
        <value>2</value>
    </property>
    <property>
        <name>dfs.namenode.secondary.http-address</name>
        <value>192.168.1.182:9001</value>
    </property>
    <property>
        <name>dfs.webhdfs.enabled</name>
        <value>true</value>
    </property>
</configuration>

[root@master ~]# mv /usr/local/hadoop/etc/hadoop/mapred-site.xml.template /usr/local/hadoop/etc/hadoop/mapred-site.xml
[root@master ~]# vi /usr/local/hadoop/etc/hadoop/mapred-site.xml

Add (note: use the IP of the master-CentOS7 host):

<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
    <property>
        <name>mapreduce.jobhistory.address</name>
        <value>192.168.1.182:10020</value>
    </property>
    <property>
        <name>mapreduce.jobhistory.webapp.address</name>
        <value>192.168.1.182:19888</value>
    </property>
</configuration>

[root@master ~]# vi /usr/local/hadoop/etc/hadoop/yarn-site.xml

Add (note: use the IP of the master-CentOS7 host):

<configuration>
<!-- Site specific YARN configuration properties -->
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
    <property>
        <name>yarn.nodemanager.aux-services.mapreduce_shuffle.class</name>
        <value>org.apache.hadoop.mapred.ShuffleHandler</value>
    </property>
    <property>
        <name>yarn.resourcemanager.address</name>
        <value>192.168.1.182:8032</value>
    </property>
    <property>
        <name>yarn.resourcemanager.scheduler.address</name>
        <value>192.168.1.182:8030</value>
    </property>
    <property>
        <name>yarn.resourcemanager.resource-tracker.address</name>
        <value>192.168.1.182:8031</value>
    </property>
    <property>
        <name>yarn.resourcemanager.admin.address</name>
        <value>192.168.1.182:8033</value>
    </property>
    <property>
        <name>yarn.resourcemanager.webapp.address</name>
        <value>192.168.1.182:8088</value>
    </property>
    <property>
        <name>yarn.nodemanager.resource.memory-mb</name>
        <value>2048</value>
    </property>
</configuration>

[root@master ~]# cd /usr/local/hadoop/etc/hadoop
[root@master hadoop]# vi hadoop-env.sh

Change to: export JAVA_HOME=/usr/local/jdk1.7.0_79

[root@master hadoop]# vi yarn-env.sh

Change to: export JAVA_HOME=/usr/local/jdk1.7.0_79

[root@master hadoop]# vi slaves

Change the content to 192.168.1.183 (the IP of the slave-CentOS7 host).

[root@master hadoop]# rsync -av /usr/local/hadoop/etc/ slave:/usr/local/hadoop/etc/
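
A single stray character in any of these XML files will stop the daemons from starting, so it can be worth validating them before (or after re-editing and re-running) the rsync. A sketch using xmllint, which as far as I know is provided by the libxml2 package on CentOS 7:

cd /usr/local/hadoop/etc/hadoop
for f in core-site.xml hdfs-site.xml mapred-site.xml yarn-site.xml; do
    xmllint --noout "$f" && echo "$f OK"
done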

slave-CentOS7

[root@slave ~]# cd /usr/local/hadoop/etc/hadoop/
[root@slave hadoop]# cat slaves
192.168.1.183

The slaves file arrived correctly on the slave.

Start Hadoop

master-CentOS7

[root@master hadoop]# /usr/local/hadoop/bin/hdfs namenode -format
[root@master hadoop]# echo $?
0
[root@master hadoop]# /usr/local/hadoop/sbin/start-all.sh
[root@master hadoop]# jps
19907 ResourceManager
19604 SecondaryNameNode
19268 NameNode
20323 Jps
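
Besides jps, the cluster state can be checked from the master; both commands below are standard Hadoop 2.x CLI calls and should show the slave registered:

/usr/local/hadoop/bin/hdfs dfsadmin -report    # expect 1 live datanode (192.168.1.183)
/usr/local/hadoop/bin/yarn node -list          # expect 1 RUNNING node manager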

slave-CentOS7

[root@slave hadoop]# jps
18113 NodeManager
18509 Jps
17849 DataNode

Open http://192.168.1.182:8088/ (the YARN ResourceManager web UI) and http://192.168.1.182:50070/ (the HDFS NameNode web UI) in a browser.

Test Hadoop

master-CentOS7

[root@master hadoop]# cd /usr/local/hadoop/
[root@master hadoop]# bin/hadoop jar ./share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.1.jar pi 10 10
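
The same examples jar also contains wordcount, which exercises HDFS as well as MapReduce. A sketch that uses the Hadoop config files as input; /input and /output are arbitrary HDFS paths chosen here, and the output directory must not already exist:

cd /usr/local/hadoop
bin/hdfs dfs -mkdir -p /input
bin/hdfs dfs -put etc/hadoop/*.xml /input
bin/hadoop jar ./share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.1.jar wordcount /input /output
bin/hdfs dfs -cat '/output/part-r-*' | head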

Stop the services

master-CentOS7

[root@master hadoop]# cd /usr/local/hadoop
[root@master hadoop]# sbin/stop-all.sh

  • If a command fails with "copyFromLocal: Cannot create directory /123/. Name node is in safe mode.", HDFS safe mode is on. Fix: cd /usr/local/hadoop and run bin/hdfs dfsadmin -safemode leave (a quick way to check and clear it is sketched below).
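
A minimal way to check and clear safe mode, as referenced in the note above:

cd /usr/local/hadoop
bin/hdfs dfsadmin -safemode get     # prints "Safe mode is ON" or "Safe mode is OFF"
bin/hdfs dfsadmin -safemode leave   # force the NameNode out of safe mode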
