## 主机名 hadoop4 IP 192.168.0.106 作为DNS服务器 ## 安装bind相应的软件包: [root@hadoop4 ~]# yum -y install bind bind-utils bind-chroot ## 检查是否安装bind: [root@hadoop4 ~]# rpm -qa | grep '^bind' bind-chroot-9.8.2-0.30.rc1.el6_6.2.x86_64 bind-9.8.2-0.30.rc1.el6_6.2.x86_64 bind-libs-9.8.2-0.30.rc1.el6_6.2.x86_64 bind-utils-9.8.2-0.30.rc1.el6_6.2.x86_64 ## 修改/etc/named.conf [root@hadoop4 ~]# vim /etc/named.conf listen-on port 53 { any; }; //将原来的127.0.0.1改成any allow-query { any; }; //将原来的localhost改成any ## 修改/etc/named.rfc1912.zones,将两个zone增加在/etc/named.rfc1912.zones 文件最后(也可以直接写在named.conf中) [root@hadoop4 ~]# vim /etc/named.rfc1912.zones zone "hadoop.com" IN { type master; file "named.hadoop.com"; allow-update { none; }; }; zone "0.168.192.in-addr.arpa" IN { type master; file "named.192.168.0.zone"; allow-update { none; }; }; ## 配置正向解析文件 [root@hadoop4 ~]# cd /var/named [root@hadoop4 named]# cp -p named.localhost named.hadoop.com [root@hadoop4 named]# vim named.hadoop.com $TTL 1D @ IN SOA hadoop4.hadoop.com. grid.hadoop4.hadoop.com. ( 0 ; serial 1D ; refresh 1H ; retry 1W ; expire 3H ) ; minimum @ IN NS hadoop4.hadoop.com. hadoop4.hadoop.com. IN A 192.168.0.106 hadoop5.hadoop.com. IN A 192.168.0.107 hadoop6.hadoop.com. IN A 192.168.0.108 ## 配置反向解析文件 [root@hadoop4 ~]# cd /var/named [root@hadoop4 named]# cp -p named.localhost named.192.168.0.zone [root@hadoop4 named]# vim named.192.168.0.zone $TTL 1D @ IN SOA hadoop4.hadoop.com. grid.hadoop4.hadoop.com. ( 0 ; serial 1D ; refresh 1H ; retry 1W ; expire 3H ) ; minimum @ IN NS hadoop4.hadoop.com. 106 IN PTR hadoop4.hadoop.com. 107 IN PTR hadoop5.hadoop.com. 108 IN PTR hadoop6.hadoop.com. 
## 添加DNS域名服务器ip,在每个节点的 /etc/sysconfig/network-scripts/ifcfg-eth0 文件中加入服务器ip地址 [root@hadoop4 ~]# vim /etc/sysconfig/network-scripts/ifcfg-eth0 DEVICE=eth0 HWADDR=08:00:27:26:b8:c1 TYPE=Ethernet UUID=dabd3173-5f69-454c-aa97-5cc2a09dfba0 ONBOOT=yes NM_CONTROLLED=yes #BOOTPROTO=dhcp BOOTPROTO=static BROADCAST=192.168.0.255 IPADDR=192.168.0.106 NETMASK=255.255.255.0 GATEWAY=192.168.0.1 DNS1=192.168.0.106 [root@hadoop4 ~]# service network restart ## 启动DNS [root@hadoop4 ~]# service named start 启动 named: [确定] ## 设置开机启动 [root@hadoop4 ~]# chkconfig named on ## 查看日志确认DNS服务是否正常 [root@hadoop4 ~]# tail -n 30 /var/log/messages | grep named Apr 26 13:48:11 hadoop4 named[5443]: sizing zone task pool based on 8 zones Apr 26 13:48:11 hadoop4 named[5443]: using built-in DLV key for view _default Apr 26 13:48:11 hadoop4 named[5443]: set up managed keys zone for view _default, file '/var/named/dynamic/managed-keys.bind' Apr 26 13:48:11 hadoop4 named[5443]: Warning: 'empty-zones-enable/disable-empty-zone' not set: disabling RFC 1918 empty zones Apr 26 13:48:11 hadoop4 named[5443]: automatic empty zone: 127.IN-ADDR.ARPA Apr 26 13:48:11 hadoop4 named[5443]: automatic empty zone: 254.169.IN-ADDR.ARPA Apr 26 13:48:11 hadoop4 named[5443]: automatic empty zone: 2.0.192.IN-ADDR.ARPA Apr 26 13:48:11 hadoop4 named[5443]: automatic empty zone: 100.51.198.IN-ADDR.ARPA Apr 26 13:48:11 hadoop4 named[5443]: automatic empty zone: 113.0.203.IN-ADDR.ARPA Apr 26 13:48:11 hadoop4 named[5443]: automatic empty zone: 255.255.255.255.IN-ADDR.ARPA Apr 26 13:48:11 hadoop4 named[5443]: automatic empty zone: 0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.IP6.ARPA Apr 26 13:48:11 hadoop4 named[5443]: automatic empty zone: D.F.IP6.ARPA Apr 26 13:48:11 hadoop4 named[5443]: automatic empty zone: 8.E.F.IP6.ARPA Apr 26 13:48:11 hadoop4 named[5443]: automatic empty zone: 9.E.F.IP6.ARPA Apr 26 13:48:11 hadoop4 named[5443]: automatic empty zone: A.E.F.IP6.ARPA Apr 26 13:48:11 hadoop4 named[5443]: automatic 
empty zone: B.E.F.IP6.ARPA Apr 26 13:48:11 hadoop4 named[5443]: automatic empty zone: 8.B.D.0.1.0.0.2.IP6.ARPA Apr 26 13:48:11 hadoop4 named[5443]: command channel listening on 127.0.0.1#953 Apr 26 13:48:11 hadoop4 named[5443]: command channel listening on ::1#953 Apr 26 13:48:11 hadoop4 named[5443]: zone 0.in-addr.arpa/IN: loaded serial 0 Apr 26 13:48:11 hadoop4 named[5443]: zone 1.0.0.127.in-addr.arpa/IN: loaded serial 0 Apr 26 13:48:11 hadoop4 named[5443]: zone 0.168.192.in-addr.arpa/IN: loaded serial 0 Apr 26 13:48:11 hadoop4 named[5443]: zone 1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.ip6.arpa/IN: loaded serial 0 Apr 26 13:48:11 hadoop4 named[5443]: zone hadoop.com/IN: loaded serial 0 Apr 26 13:48:11 hadoop4 named[5443]: zone localhost.localdomain/IN: loaded serial 0 Apr 26 13:48:11 hadoop4 named[5443]: zone localhost/IN: loaded serial 0 Apr 26 13:48:11 hadoop4 named[5443]: managed-keys-zone ./IN: loaded serial 3 Apr 26 13:48:11 hadoop4 named[5443]: running Apr 26 13:48:11 hadoop4 named[5443]: zone hadoop.com/IN: sending notifies (serial 0) Apr 26 13:48:11 hadoop4 named[5443]: zone 0.168.192.in-addr.arpa/IN: sending notifies (serial 0) ## 验证测试 [root@hadoop4 ~]# nslookup hadoop4.hadoop.com Server: 192.168.0.106 Address: 192.168.0.106#53 Name: hadoop4.hadoop.com Address: 192.168.0.106 [root@hadoop4 ~]# nslookup 192.168.0.107 Server: 192.168.0.106 Address: 192.168.0.106#53 107.0.168.192.in-addr.arpa name = hadoop5.hadoop.com.
由于我在自己的实验环境里节点都只设置了主机名(hostname),而没有设置域名(domainname),所以我更改了一些设置,来达到仅仅解析hostname的目的。(注意:下面的做法把 zone "." 声明为 master,相当于让本机自认为是根域的权威服务器,named 将不再递归解析任何外部域名;这种配置只适用于与外网隔离的封闭实验环境,生产环境请勿照搬。)
[root@hadoop4 ~]# cat /etc/named.rfc1912.zones zone "." IN { type master; file "named.hadoop.com"; allow-update { none; }; }; zone "0.168.192.in-addr.arpa" IN { type master; file "named.192.168.0.zone"; allow-update { none; }; }; [root@hadoop4 ~]# cat /var/named/named.hadoop.com $TTL 1D @ IN SOA hadoop4. grid.hadoop4. ( 0 ; serial 1D ; refresh 1H ; retry 1W ; expire 3H ) ; minimum @ IN NS hadoop4. hadoop4 IN A 192.168.0.106 hadoop5 IN A 192.168.0.107 hadoop6 IN A 192.168.0.108 [root@hadoop4 ~]# cat /var/named/named.192.168.0.zone $TTL 1D @ IN SOA hadoop4. grid.hadoop4. ( 0 ; serial 1D ; refresh 1H ; retry 1W ; expire 3H ) ; minimum @ IN NS hadoop4. 106 IN PTR hadoop4. 107 IN PTR hadoop5. 108 IN PTR hadoop6. [root@hadoop4 ~]# nslookup hadoop6 Server: 192.168.0.106 Address: 192.168.0.106#53 Name: hadoop6 Address: 192.168.0.108 [root@hadoop4 ~]# nslookup 192.168.0.107 Server: 192.168.0.106 Address: 192.168.0.106#53 107.0.168.192.in-addr.arpa name = hadoop5.