
spark install

 

.bashrc

export HADOOP_HOME=/usr/local/hadoop/hadoop-2.6.4
export HADOOP_CONF_DIR=/usr/local/hadoop/hadoop-2.6.4/etc/hadoop
export SCALA_HOME=/usr/local/scala/scala-2.10.6
export SPARK_HOME=/usr/local/spark/spark-2.0.0-bin-hadoop2.6
export JAVA_HOME=/usr/java/jdk1.8.0
export JRE_HOME=${JAVA_HOME}/jre
export CLASS_PATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:${SPARK_HOME}/bin:${SPARK_HOME}/sbin:${SCALA_HOME}/bin:${HADOOP_HOME}/bin:${HADOOP_HOME}/sbin:$PATH
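
After editing .bashrc, reload it so the new variables take effect; a quick sanity check (the paths above are examples for this particular layout, adjust to yours):

source ~/.bashrc
# verify the toolchain is on PATH
java -version
scala -version
hadoop version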

 

 

spark-env.sh

 

export JAVA_HOME=/usr/java/jdk1.8.0

export SCALA_HOME=/usr/local/scala/scala-2.10.6

export HADOOP_HOME=/usr/local/hadoop/hadoop-2.6.4

export HADOOP_CONF_DIR=/usr/local/hadoop/hadoop-2.6.4/etc/hadoop

export SPARK_MASTER_IP=Master

export SPARK_WORKER_MEMORY=1g

export SPARK_EXECUTOR_MEMORY=1g

export SPARK_DRIVER_MEMORY=1g

export SPARK_WORKER_CORES=8
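
These exports belong in $SPARK_HOME/conf/spark-env.sh, which is normally created from the bundled template; a minimal sketch, assuming the SPARK_HOME set above:

cp $SPARK_HOME/conf/spark-env.sh.template $SPARK_HOME/conf/spark-env.sh
# then append the export lines above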

 

slaves

         Worker1

         Worker2
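
The slaves file also lives in $SPARK_HOME/conf, one worker hostname per line. A sketch for creating it and pushing the configuration to the workers (assumes passwordless SSH and that Worker1/Worker2 resolve via /etc/hosts):

cp $SPARK_HOME/conf/slaves.template $SPARK_HOME/conf/slaves
# add Worker1 and Worker2, then distribute the conf directory
scp -r $SPARK_HOME/conf Worker1:$SPARK_HOME/
scp -r $SPARK_HOME/conf Worker2:$SPARK_HOME/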

 

 

 

 


spark-defaults.conf

 

spark.executor.extraJavaOptions  -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three"

spark.eventLog.enabled           true

spark.eventLog.dir               hdfs://Master:9000/historyserverforSpark

spark.yarn.historyServer.address Master:18080

spark.history.fs.logDirectory hdfs://Master:9000/historyserverforSpark
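
Like spark-env.sh, spark-defaults.conf is created from its template; a sketch:

cp $SPARK_HOME/conf/spark-defaults.conf.template $SPARK_HOME/conf/spark-defaults.conf
# then add the event-log and history-server entries above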

 

 

Create the event-log directory on HDFS before starting the history server:

hadoop dfs -mkdir /historyserverforSpark
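
With the directory in place, start the standalone cluster and the history server from Master (standard Spark 2.x scripts; ports are the defaults):

$SPARK_HOME/sbin/start-all.sh
$SPARK_HOME/sbin/start-history-server.sh
# workers register at http://Master:8080, finished applications appear at http://Master:18080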

 

Submit the SparkPi example to the standalone master (the relative jar path assumes the command is run from $SPARK_HOME/bin):

spark-submit \
  --class org.apache.spark.examples.SparkPi \
  --master spark://Master:7077 \
  ../examples/jars/spark-examples_2.11-2.0.0.jar 1000
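
A successful run ends with a driver log line like "Pi is roughly 3.14..."; the completed application should also show up in the history server UI at Master:18080.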
