为spark添加入口机



在 spark master 的 /user/hadoop/home/setupSpark 目录下，以 root 用户执行以下命令

# Connection settings for the entrance (gateway) node.
# Replace the placeholders with the real IP address and root password
# before running the rest of this runbook.
# Quoted: an unquoted *.*.*.* is a glob pattern and may expand against
# matching files in the current directory.
export ENTRANCE_IP='*.*.*.*'
export ENTRANCE_PASSWD='******'

# Add the spark cluster servers to /etc/hosts on the entrance node.
# First, sanity-check that password-based SSH to the entrance node works.
# (The original line repeated "sshpass" twice and opened an interactive
# shell with no remote command, which stalls the script.)
sshpass -p "${ENTRANCE_PASSWD}" ssh -o StrictHostKeyChecking=no "${ENTRANCE_IP}" true
# Write the cluster's IP -> hostname mapping to ./sparkhosts.
# Delimiter is quoted: the body is literal data, nothing to expand.
cat >sparkhosts <<'EOF'
10.140.60.85    10-140-60-85
10.140.60.86    10-140-60-86
10.140.60.87    10-140-60-87
10.140.60.88    10-140-60-88
10.140.60.89    10-140-60-89
10.140.60.90    10-140-60-90
10.140.60.91    10-140-60-91
10.140.60.92    10-140-60-92
10.140.60.95    10-140-60-95
10.140.60.96    10-140-60-96
10.140.60.97    10-140-60-97
10.140.60.98    10-140-60-98
10.140.60.99    10-140-60-99
10.140.60.100   10-140-60-100
10.140.60.101   10-140-60-101
10.140.60.103   10-140-60-103
10.140.60.104   10-140-60-104
10.140.60.107   10-140-60-107
10.140.60.108   10-140-60-108
10.140.60.109   10-140-60-109
EOF
# Copy the host list to the entrance node and append it to /etc/hosts.
# Variables are quoted so a password containing spaces or glob characters
# survives word splitting; -r is unnecessary for a single regular file.
sshpass -p "${ENTRANCE_PASSWD}" scp sparkhosts "${ENTRANCE_IP}:/letv"
# Append only once: an unconditional >> duplicates entries on every rerun.
sshpass -p "${ENTRANCE_PASSWD}" ssh "${ENTRANCE_IP}" \
  "grep -q '10-140-60-85' /etc/hosts || cat /letv/sparkhosts >> /etc/hosts"


# 2. Add the entrance node's IP to /etc/hosts on every spark cluster server.
#    Manual step: edit the file by hand, then push it to all slaves.
vim /etc/hosts
upgrade.sh distribute newslaves /etc/hosts /etc/

# Install scala on the entrance node.
sshpass -p "${ENTRANCE_PASSWD}" scp scala-2.10.4.tgz "${ENTRANCE_IP}:/letv"
# Chained with && so a failed cd/tar never reaches the rm -rf.
sshpass -p "${ENTRANCE_PASSWD}" ssh "${ENTRANCE_IP}" \
  "cd /letv && tar -xzf scala-2.10.4.tgz && rm -rf /usr/local/scala && ln -s /letv/scala-2.10.4 /usr/local/scala"

# Install spark on the entrance node.
sshpass -p "${ENTRANCE_PASSWD}" scp -r spark-1.2.0-bin-hadoop2.4 "${ENTRANCE_IP}:/letv"
# Chained with && so the symlink is only created when the old one is gone.
sshpass -p "${ENTRANCE_PASSWD}" ssh "${ENTRANCE_IP}" \
  "rm -rf /usr/local/spark && ln -s /letv/spark-1.2.0-bin-hadoop2.4 /usr/local/spark"

# Copy the hadoop distribution; the entrance-node config steps below
# reference paths inside /letv/hadoop-2.5.2.
sshpass -p "${ENTRANCE_PASSWD}" scp -r hadoop-2.5.2 "${ENTRANCE_IP}:/letv"

# Perform the following steps on the entrance node:
#   - set: export HADOOP_CONF_DIR=/letv/hadoop-2.5.2/etc/hadoop
#   - set: export SPARK_CLASSPATH=/letv/hadoop-2.5.2/share/hadoop/common/lib/hadoop-gpl-compression-0.2.0-dev.jar
# NOTE(review): the path below is hadoop-env.sh inside the spark conf dir —
# this is likely meant to be spark-env.sh; confirm before editing.
vim /usr/local/spark/conf/hadoop-env.sh
vim /etc/profile
# Append the following lines to /etc/profile:
export SCALA_HOME=/usr/local/scala
export SPARK_HOME=/usr/local/spark
# Quoted so an existing PATH containing spaces survives word splitting.
export PATH="$PATH:${SCALA_HOME}/bin:${SPARK_HOME}/bin"

你可能感兴趣的:(spark)