spark 集群

slave 上, root@slave2:/opt/spark-2.1.0-bin-hadoop2.6/conf# vim spark-env.sh:

# spark-env.sh — Spark 2.1.0 standalone worker environment (slave node).
export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64
export SCALA_HOME=/opt/scala-2.11.8

# Define the Hadoop root once; every Hadoop-related path below is derived
# from it so a version bump only needs one edit.
export HADOOP_HOME=/opt/hadoop-2.6.5
export SPARK_HOME=/opt/spark-2.1.0-bin-hadoop2.6

# Master address. SPARK_MASTER_IP is deprecated since Spark 2.0 — the 2.x
# start scripts read SPARK_MASTER_HOST — so set both to stay compatible.
# NOTE(review): the yarn-site.xml in this cluster uses hostname "master";
# confirm "namenode" resolves to the same machine.
export SPARK_MASTER_HOST=namenode
export SPARK_MASTER_IP=$SPARK_MASTER_HOST

# Per-worker resources offered to the standalone scheduler.
export SPARK_WORKER_CORES=3
export SPARK_WORKER_MEMORY=12g

# Hadoop/YARN layout, all rooted at HADOOP_HOME.
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export HADOOP_CONF_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_INSTALL=$HADOOP_HOME
export YARN_CONF_DIR=$HADOOP_HOME/etc/hadoop

# Hadoop jars on Spark's classpath. The '*' entries are expanded by the JVM
# classloader, not the shell (globs do not expand in assignments).
# NOTE(review): SPARK_CLASSPATH is deprecated in Spark 2.x — prefer
# spark.driver.extraClassPath / spark.executor.extraClassPath; kept here
# because it is still honored.
export SPARK_CLASSPATH=$HADOOP_HOME/etc/hadoop:$HADOOP_HOME/share/hadoop/common/lib/*:$HADOOP_HOME/share/hadoop/common/*:$HADOOP_HOME/share/hadoop/hdfs:$HADOOP_HOME/share/hadoop/hdfs/lib/*:$HADOOP_HOME/share/hadoop/hdfs/*:$HADOOP_HOME/share/hadoop/yarn/lib/*:$HADOOP_HOME/share/hadoop/yarn/*:$HADOOP_HOME/share/hadoop/mapreduce/lib/*:$HADOOP_HOME/share/hadoop/mapreduce/*:$HADOOP_HOME/contrib/capacity-scheduler/*.jar

slave2上 yarn-site.xml







<configuration>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services.mapreduce.shuffle.class</name>
    <value>org.apache.hadoop.mapred.ShuffleHandler</value>
  </property>
  <property>
    <name>yarn.resourcemanager.address</name>
    <value>master:8032</value>
  </property>
  <property>
    <name>yarn.resourcemanager.scheduler.address</name>
    <value>master:8030</value>
  </property>
  <!-- NOTE(review): the stock default for resource-tracker is master:8031;
       8035 is kept as written here — confirm it matches the master's config. -->
  <property>
    <name>yarn.resourcemanager.resource-tracker.address</name>
    <value>master:8035</value>
  </property>
  <property>
    <name>yarn.resourcemanager.admin.address</name>
    <value>master:8033</value>
  </property>
  <property>
    <name>yarn.resourcemanager.webapp.address</name>
    <value>0.0.0.0:8088</value>
  </property>
  <property>
    <name>yarn.nodemanager.resource.memory-mb</name>
    <value>7168</value>
  </property>
  <property>
    <name>yarn.scheduler.minimum-allocation-mb</name>
    <value>1024</value>
  </property>
  <property>
    <name>yarn.nodemanager.pmem-check-enabled</name>
    <value>false</value>
  </property>
  <property>
    <name>yarn.nodemanager.vmem-check-enabled</name>
    <value>false</value>
  </property>
</configuration>


/opt/spark-2.1.0-bin-hadoop2.6/conf# vim spark-defaults.conf:

# Example:
# spark.master                     spark://master:7077
# spark.eventLog.enabled           true
# spark.eventLog.dir               hdfs://namenode:8021/directory
# spark.serializer                 org.apache.spark.serializer.KryoSerializer
spark.driver.memory              12g
spark.scheduler.mode             FAIR
# spark.executor.extraJavaOptions  -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three"

/opt/spark-2.1.0-bin-hadoop2.6/conf# vim slaves

slave1
slave2
slave3
slave4

你可能感兴趣的:(spark 集群)