
Linux Configuration for Hadoop

1. Create the hadoop group and user (password: hadoop)

[root@localhost home]# groupadd hadoop
[root@localhost home]# useradd -g hadoop hadoop
[root@localhost home]# passwd hadoop
Changing password for user hadoop.
New password: 
BAD PASSWORD: it is based on a dictionary word
BAD PASSWORD: is too simple
Retype new password: 
passwd: all authentication tokens updated successfully.
[root@localhost home]#
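To confirm the account was created as expected, you can check it with id (the numeric uid and gid will differ from system to system):

id hadoop
# should report user hadoop with primary group hadoop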

2. Time service (clock synchronization with NTP)

[root@localhost home]# crontab -e

[root@localhost home]# crontab -l

0 1 * * * /usr/sbin/ntpdate cn.pool.ntp.org
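The cron entry above syncs the clock against cn.pool.ntp.org at 01:00 every day. Before relying on it, you can run the same command once by hand to confirm the NTP server is reachable (same path as in the cron line):

/usr/sbin/ntpdate cn.pool.ntp.org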

3. Install jdk-6u45-linux-i586.bin (the file has no execute permission; add it, then run the installer)

[root@localhost java]# pwd

/usr/local/java

[root@localhost java]# ll

total 130600

-rwxrw-rw-. 1 root root 61927560 Jun  7  2013 hadoop-1.1.2.tar.gz

-rw-r--r--. 1 root root 71799552 Oct 14 14:33 jdk-6u45-linux-i586.bin

[root@localhost java]# chmod u+x jdk-6u45-linux-i586.bin 

[root@localhost java]# ll

total 130600

-rwxrw-rw-. 1 root root 61927560 Jun  7  2013 hadoop-1.1.2.tar.gz

-rwxr--r--. 1 root root 71799552 Oct 14 14:33 jdk-6u45-linux-i586.bin

[root@localhost java]# ./jdk-6u45-linux-i586.bin

Configure the environment variables. Rather than editing /etc/profile directly, create a new java.sh file under /etc/profile.d/ and put the Java environment variables there; /etc/profile automatically sources that java.sh file.

[root@localhost jdk1.6.0_45]# pwd

/usr/local/java/jdk1.6.0_45

[root@localhost jdk1.6.0_45]# vi /etc/profile.d/java.sh

[root@localhost jdk1.6.0_45]#
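The contents of java.sh are not shown in the transcript; a minimal sketch of what it might contain, assuming the installer unpacked the JDK to /usr/local/java/jdk1.6.0_45 as above:

# /etc/profile.d/java.sh
export JAVA_HOME=/usr/local/java/jdk1.6.0_45
export PATH=$JAVA_HOME/bin:$PATH
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar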

[root@localhost jdk1.6.0_45]# java

bash: java: command not found

[root@localhost jdk1.6.0_45]# source /etc/profile  (apply the settings in java.sh)

[root@localhost jdk1.6.0_45]# java -version

java version "1.6.0_45"

Java(TM) SE Runtime Environment (build 1.6.0_45-b06)

Java HotSpot(TM) Client VM (build 20.45-b01, mixed mode, sharing)

[root@localhost jdk1.6.0_45]# javac -version

javac 1.6.0_45

[root@localhost jdk1.6.0_45]#

4. Change the hostname

1. vi /etc/sysconfig/network
Change the HOSTNAME entry to the new hostname (node in this example; a sketch of the edited file appears at the end of this step).
To make it take effect: reboot Linux, or run hostname <new-hostname> (this sets the hostname only temporarily and is lost after a reboot). It is also best to run source /etc/profile afterwards.

2. [root@localhost jdk1.6.0_45]# vi /etc/sysconfig/network


[root@localhost jdk1.6.0_45]# hostname
localhost.localdomain
[root@localhost jdk1.6.0_45]# 
You need to log out once here before the new hostname takes effect.
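A sketch of /etc/sysconfig/network after the edit in step 1 (use whatever hostname you chose; the later steps in this walkthrough use master):

NETWORKING=yes
HOSTNAME=master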

5. Edit the hosts file (if ping succeeds, the change works)

[root@localhost Desktop]# vi /etc/hosts
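The entry added to /etc/hosts is not shown above; judging from the ping output below, it maps the hostname to the machine's IP address, for example:

192.168.209.100   master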


[root@localhost Desktop]# ping master
PING master (192.168.209.100) 56(84) bytes of data.
64 bytes from master (192.168.209.100): icmp_seq=1 ttl=64 time=0.488 ms
64 bytes from master (192.168.209.100): icmp_seq=2 ttl=64 time=0.083 ms
^C
--- master ping statistics ---
2 packets transmitted, 2 received, 0% packet loss, time 1374ms
rtt min/avg/max/mdev = 0.083/0.285/0.488/0.203 ms
[root@localhost Desktop]# 

6. Disable the firewall

[root@localhost Desktop]# service iptables status
Table: filter
Chain INPUT (policy ACCEPT)
num  target     prot opt source               destination         
1    ACCEPT     all  --  0.0.0.0/0            0.0.0.0/0           state RELATED,ESTABLISHED 
2    ACCEPT     icmp --  0.0.0.0/0            0.0.0.0/0           
3    ACCEPT     all  --  0.0.0.0/0            0.0.0.0/0           
4    ACCEPT     tcp  --  0.0.0.0/0            0.0.0.0/0           state NEW tcp dpt:22 
5    REJECT     all  --  0.0.0.0/0            0.0.0.0/0           reject-with icmp-host-prohibited 
Chain FORWARD (policy ACCEPT)
num  target     prot opt source               destination         
1    REJECT     all  --  0.0.0.0/0            0.0.0.0/0           reject-with icmp-host-prohibited 
Chain OUTPUT (policy ACCEPT)
num  target     prot opt source               destination         
[root@localhost Desktop]# service iptables stop
iptables: Flushing firewall rules:                         [  OK  ]
iptables: Setting chains to policy ACCEPT: filter          [  OK  ]
iptables: Unloading modules:                               [  OK  ]
[root@localhost Desktop]# chkconfig iptables --list
iptables       0:off  1:off  2:on  3:on  4:on  5:on  6:off
[root@localhost Desktop]# chkconfig iptables off
[root@localhost Desktop]# chkconfig iptables --list
iptables       0:off  1:off  2:off  3:off  4:off  5:off  6:off
[root@localhost Desktop]#
[root@localhost Desktop]# service iptables status
iptables: Firewall is not running.

7. Passwordless SSH login (as the hadoop user)

Switch to the hadoop user:
[root@localhost ~]# su hadoop
Generate the public and private keys (there will be 3 prompts; just press Enter at each one):
[hadoop@localhost root]$ cd
[hadoop@localhost ~]$ pwd
/home/hadoop
[hadoop@localhost ~]$ ssh-keygen -t rsa
Generating public/private rsa key pair.
Enter file in which to save the key (/home/hadoop/.ssh/id_rsa): 
Enter passphrase (empty for no passphrase): 
Enter same passphrase again: 
Your identification has been saved in /home/hadoop/.ssh/id_rsa.
Your public key has been saved in /home/hadoop/.ssh/id_rsa.pub.
The key fingerprint is:
33:09:0b:6d:30:f5:07:10:40:0d:be:99:cf:a9:a4:92 hadoop@localhost.localdomain
The key's randomart image is:
+--[ RSA 2048]----+
|   .*=+o.        |
|   . +.. .       |
|    o + . .      |
|     * o o       |
|    + . S        |
|     o . o       |
| .  . +          |
|E  o .           |
| .. .            |
+-----------------+
[hadoop@localhost ~]$
[hadoop@localhost ~]$ ls
[hadoop@localhost ~]$ ll -a
total 36
drwx------. 5 hadoop hadoop 4096 Feb 28 14:19 .
drwxr-xr-x. 3 root   root   4096 Feb 28 13:47 ..
-rw-------. 1 hadoop hadoop   79 Feb 28 14:23 .bash_history
-rw-r--r--. 1 hadoop hadoop   18 Feb 22  2013 .bash_logout
-rw-r--r--. 1 hadoop hadoop  176 Feb 22  2013 .bash_profile
-rw-r--r--. 1 hadoop hadoop  124 Feb 22  2013 .bashrc
drwxr-xr-x. 2 hadoop hadoop 4096 Nov 12  2010 .gnome2
drwxr-xr-x. 4 hadoop hadoop 4096 Feb 28 06:11 .mozilla
drwx------. 2 hadoop hadoop 4096 Feb 28 14:23 .ssh
[hadoop@localhost ~]$ cd .ssh/
[hadoop@localhost .ssh]$ ls
id_rsa  id_rsa.pub
[hadoop@localhost .ssh]$ ll
total 8
-rw-------. 1 hadoop hadoop 1671 Feb 28 14:23 id_rsa
-rw-r--r--. 1 hadoop hadoop  410 Feb 28 14:23 id_rsa.pub
[hadoop@localhost .ssh]$ cp id_rsa.pub authorized_keys
[hadoop@localhost .ssh]$ ll
total 12
-rw-r--r--. 1 hadoop hadoop  410 Feb 28 14:26 authorized_keys
-rw-------. 1 hadoop hadoop 1671 Feb 28 14:23 id_rsa
-rw-r--r--. 1 hadoop hadoop  410 Feb 28 14:23 id_rsa.pub
[hadoop@localhost .ssh]$ 
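Note: with the default sshd setting StrictModes yes, the .ssh directory and the authorized_keys file must not be writable by other users. If the passwordless login below fails, tightening the permissions is a common fix (this step is not in the original transcript):

chmod 700 ~/.ssh
chmod 600 ~/.ssh/authorized_keys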
SSH login:
[hadoop@localhost .ssh]$ ssh master
The authenticity of host 'master (192.168.209.100)' can't be established.
RSA key fingerprint is f0:92:0b:08:0d:9b:72:0d:ca:99:30:0a:40:7e:05:ae.
This prompt only appears on the first SSH login to the host; answer yes. After that, ssh master connects without asking for a password.
Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added 'master,192.168.209.100' (RSA) to the list of known hosts.
[hadoop@localhost ~]$ exit
logout
Connection to master closed.
[hadoop@localhost .ssh]$ ssh master
Last login: Fri Feb 28 14:27:32 2014 from master
[hadoop@localhost ~]$
Exit the SSH session and continue with the rest of the environment configuration:
[hadoop@localhost ~]$ exit
logout

Note: the above is the Linux environment configuration needed before deploying Hadoop. As for the final SSH setup, a later post will explain how it works and show a simple way to configure it across the multiple servers of a cluster.