1. Upload spark-2.2.2-bin-hadoop2.7.tgz
2. Extract the archive
tar -zxvf spark-2.2.2-bin-hadoop2.7.tgz -C /usr/local/
3. In conf/, rename spark-env.sh.template to spark-env.sh
cd /usr/local/spark-2.2.2-bin-hadoop2.7/conf/
mv spark-env.sh.template spark-env.sh
4. Edit the configuration file spark-env.sh
1. Open spark-env.sh
vi spark-env.sh
2. Add the following lines
export JAVA_HOME=/usr/local/jdk1.8.0_211
export SPARK_MASTER_HOST=hadoop01
export SPARK_MASTER_PORT=7077
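Optionally, the same file can cap the resources each worker offers the cluster. SPARK_WORKER_CORES and SPARK_WORKER_MEMORY are standard spark-env.sh settings; the values below are placeholders, not part of the original setup, so adjust them to your machines:
# example values: limit each worker to 2 cores and 2 GB of memory
export SPARK_WORKER_CORES=2
export SPARK_WORKER_MEMORY=2g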
5. Rename slaves.template to slaves
mv slaves.template slaves
6. Edit the configuration file slaves
1. Open slaves
vim slaves
2. Set the contents to the worker hostnames, one per line
hadoop02
hadoop03
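The slaves file only works if these hostnames resolve on every node. A quick sanity check, assuming name resolution is already configured (e.g. via /etc/hosts):
# each ping should reach the worker; if not, fix /etc/hosts or DNS first
ping -c 1 hadoop02
ping -c 1 hadoop03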
7. Configure environment variables
1. Edit /etc/profile
vim /etc/profile
2. Append the following
export SPARK_HOME=/usr/local/spark-2.2.2-bin-hadoop2.7
export PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin
3. Reload the environment variables
source /etc/profile
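A quick check that the variables took effect; spark-submit ships in $SPARK_HOME/bin, so it should now be found on the PATH:
echo $SPARK_HOME
which spark-submit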
8. Distribute to the other nodes
scp -r /usr/local/spark-2.2.2-bin-hadoop2.7 hadoop02:/usr/local/
scp -r /usr/local/spark-2.2.2-bin-hadoop2.7 hadoop03:/usr/local/
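If hadoop02 and hadoop03 should also have the Spark commands on their PATH, the profile changes from step 7 need to be repeated there. A sketch assuming root SSH access and that the workers' /etc/profile is otherwise identical to the master's:
scp /etc/profile hadoop02:/etc/profile
scp /etc/profile hadoop03:/etc/profile
# then run `source /etc/profile` on each worker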
9. Done
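To confirm the cluster actually comes up, start it from the master and check the processes. This assumes passwordless SSH from hadoop01 to the workers, which the start scripts rely on:
# on hadoop01: start the master plus every worker listed in slaves
/usr/local/spark-2.2.2-bin-hadoop2.7/sbin/start-all.sh
# jps should show a Master process on hadoop01 and a Worker on hadoop02/hadoop03
jps
# the master web UI is at http://hadoop01:8080 by default;
# a shell can attach with: spark-shell --master spark://hadoop01:7077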