1. Download the installation package
Apache archive (Index of /dist/spark): https://archive.apache.org/dist/spark/
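For example, to fetch the 3.1.3 build for Hadoop 3.2 that the paths below assume (a sketch; substitute the version you need):
# download the Spark 3.1.3 tarball built against Hadoop 3.2
wget https://archive.apache.org/dist/spark/spark-3.1.3/spark-3.1.3-bin-hadoop3.2.tgz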
2. Upload the installation package
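A minimal sketch using scp; the user, the host name node1, and the target directory are placeholders for your own server:
# copy the tarball to the server (node1 is a hypothetical hostname)
scp spark-3.1.3-bin-hadoop3.2.tgz root@node1:/usr/local/software/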
3. Extract the installation package
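For example, extracting into /usr/local/software so the result matches the SPARK_HOME set in the next step:
# unpack into the install directory
tar -zxvf spark-3.1.3-bin-hadoop3.2.tgz -C /usr/local/software/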
4. Set SPARK_HOME in /etc/profile
vi /etc/profile
#spark-3.1.3
export SPARK_HOME=/usr/local/software/spark-3.1.3-bin-hadoop3.2
export PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin
5. Reload the profile so the changes take effect
source /etc/profile
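To verify that the variables took effect (an optional quick check):
# SPARK_HOME should print the install path, and spark-submit should report version 3.1.3
echo $SPARK_HOME
spark-submit --version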
6. Enter the spark/conf directory and create spark-env.sh from its template
cp spark-env.sh.template spark-env.sh
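Then open the new file for editing:
vi spark-env.sh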
7. Add the required settings to spark-env.sh
# Hadoop installation and configuration directories
export HADOOP_HOME=/usr/local/software/hadoop-3.1.3
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
# YARN configuration directory
export YARN_CONF_DIR=$HADOOP_HOME/etc/hadoop
# Spark installation directory
export SPARK_HOME=/usr/local/software/spark-3.1.3-bin-hadoop3.2
# Spark executables directory
export PATH=$SPARK_HOME/bin:$PATH
# Let Spark read and write data in HDFS
export SPARK_DIST_CLASSPATH=$(hadoop classpath)
# Set JAVA_HOME
export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.402.b06-1.el7_9.x86_64
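Note that $(hadoop classpath) is expanded when spark-env.sh is sourced, so the hadoop command must already be on the PATH; a quick sanity check:
# print the classpath Spark will inherit for HDFS access
hadoop classpath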
8. Start Spark
# run from the Spark installation directory so the right start-all.sh is used
cd $SPARK_HOME
./sbin/start-all.sh
ps -ef | grep spark
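To confirm the cluster works end to end, you can submit the bundled SparkPi example (a sketch; <master-host> is a placeholder for the machine running the master, and the jar name matches the Scala 2.12 build shipped with Spark 3.1.3):
# run SparkPi on the standalone cluster started above (7077 is the default master port)
spark-submit --master spark://<master-host>:7077 \
  --class org.apache.spark.examples.SparkPi \
  $SPARK_HOME/examples/jars/spark-examples_2.12-3.1.3.jar 10
You can also check the master's web UI on port 8080 to see the registered workers.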