Environment:

  • Java: 1.8.0_151

  • Hadoop: 2.6.5

Extract Spark

$ tar -xzvf spark-1.6.3-bin-hadoop2.6.tgz
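The profile script below assumes the extracted directory lives under /opt/modules; if you unpacked it elsewhere, move it there first:

$ mv spark-1.6.3-bin-hadoop2.6 /opt/modules/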

Configure Spark

Create spark.sh under /etc/profile.d:

export SPARK_HOME=/opt/modules/spark-1.6.3-bin-hadoop2.6
export PATH=$SPARK_HOME/bin:$PATH
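To apply the new variables in the current shell and confirm Spark is on the PATH (spark-submit --version prints the version banner):

$ source /etc/profile.d/spark.sh
$ spark-submit --version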

In $SPARK_HOME/conf, create spark-env.sh, spark-defaults.conf, and slaves from their templates:

$ cp spark-env.sh.template spark-env.sh
$ cp spark-defaults.conf.template spark-defaults.conf
$ cp slaves.template slaves

Configure spark-env.sh

export JAVA_HOME=/usr/local/jdk1.8.0_151
# Spark 1.x reads SPARK_MASTER_IP; SPARK_MASTER_HOST only replaced it in Spark 2.x
SPARK_MASTER_IP=kevin-master

Configure spark-defaults.conf

spark.master spark://kevin-master:7077
spark.local.dir /opt/data/spark_shuffle
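spark.local.dir is where Spark keeps shuffle and scratch files; it is safest to create it up front and make sure the user running Spark can write to it:

$ mkdir -p /opt/data/spark_shuffle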

Configure slaves

Each host listed in slaves runs a Worker; here the master node doubles as the only worker.

kevin-master
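For a multi-node cluster, list one worker hostname per line (kevin-worker1 and kevin-worker2 are hypothetical names, shown only for illustration):

kevin-master
kevin-worker1
kevin-worker2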

Start Spark

$ $SPARK_HOME/sbin/start-all.sh
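On success, a Master and a Worker JVM should both be running on this host; jps lists them by class name (PIDs will vary):

$ jps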

View the Spark web UI

The Master web UI listens on port 8080 by default (replace the IP with your master host's address):

http://192.168.73.132:8080

Verify Spark

Submit a job with Spark's bundled example runner:

$ ${SPARK_HOME}/bin/run-example SparkPi
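A successful run prints a line like "Pi is roughly 3.14..." near the end of the driver output. The same example can be submitted through spark-submit against the standalone master; the examples jar path below follows the 1.6.x binary-distribution layout and may differ on your install:

$ $SPARK_HOME/bin/spark-submit \
    --class org.apache.spark.examples.SparkPi \
    --master spark://kevin-master:7077 \
    $SPARK_HOME/lib/spark-examples-*.jar \
    100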
