下载:
    # Download the prebuilt Spark 2.4.4 / Hadoop 2.7 package from the TUNA mirror.
    wget http://mirrors.tuna.tsinghua.edu.cn/apache/spark/spark-2.4.4/spark-2.4.4-bin-hadoop2.7.tgz

    # The archive is a gzipped tar (.tgz) — the original command referenced a
    # non-existent ".tar" file. Extract into /home and use absolute paths so the
    # commands do not depend on the current working directory.
    tar -xzvf spark-2.4.4-bin-hadoop2.7.tgz -C /home
    mv /home/spark-2.4.4-bin-hadoop2.7 /home/spark-2.4.4

    # Create spark-env.sh from the shipped template. The template lives in
    # $SPARK_HOME/conf — cd there explicitly instead of assuming the cwd.
    cd /home/spark-2.4.4/conf
    cp spark-env.sh.template spark-env.sh

    vim spark-env.sh

    # Append the following lines to spark-env.sh:
    export JAVA_HOME=/home/jdk1.8.0_211
    # NOTE: SPARK_MASTER_IP is the legacy name; Spark 2.x documents it as
    # SPARK_MASTER_HOST, but the old name is still honored.
    export SPARK_MASTER_IP=bigdata-1.mid.neu.com
    # Total memory each worker may hand out to executors on its machine.
    export SPARK_WORKER_MEMORY=1g
    # Lets Spark pick up the HDFS/YARN client configuration.
    export HADOOP_CONF_DIR=/home/hadoop-3.1.2/etc/hadoop

    # Create the worker-node list from the shipped template (run in $SPARK_HOME/conf).
    cp slaves.template slaves

    修改 slaves 文件,写入以下 worker 节点主机名(每行一个):
    bigdata-2.mid.neu.com
    bigdata-3.mid.neu.com
    bigdata-4.mid.neu.com

    把spark安装包分发给其他节点
    # Copy the configured installation to every worker node. Use the absolute
    # source path so this works from any working directory.
    # NOTE(review): 10.10.195.54/55/56 are presumably bigdata-2/3/4.mid.neu.com
    # from the slaves file — verify the IP-to-hostname mapping.
    scp -r /home/spark-2.4.4/ 10.10.195.54:/home
    scp -r /home/spark-2.4.4/ 10.10.195.55:/home
    scp -r /home/spark-2.4.4/ 10.10.195.56:/home

    配置所有节点环境变量:
    # Append the Spark environment variables to /etc/profile (do this on every node).
    vim /etc/profile

    export SPARK_HOME=/home/spark-2.4.4
    export PATH=$PATH:$SPARK_HOME/bin

    # Reload so the current shell session picks up the new variables.
    source /etc/profile

    启动 Spark 集群(在 master 节点 bigdata-1 上执行):
    # Anchor the path to $SPARK_HOME: the bare relative "sbin/start-all.sh" only
    # works when the cwd is the Spark install dir, and the script name collides
    # with Hadoop's own start-all.sh, so never rely on PATH lookup here.
    $SPARK_HOME/sbin/start-all.sh