Spark cluster start script (start-spark-cluster)
cd /usr/local/bin
sudo vi start-spark-cluster
The script contents are as follows:
#!/bin/bash
SPARK_HOME=/home/vagrant/modules/spark
echo "start spark-cluster-------------------------------------------------"
for i in bigdata-node1
do
  ssh $i "source /etc/profile;${SPARK_HOME}/sbin/start-all.sh"
  ssh $i "source /etc/profile;${SPARK_HOME}/sbin/start-history-server.sh"
done
sleep 5s
jpsx
exit 0
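The script drives the cluster over non-interactive ssh, so passwordless SSH from the control node to every node is a prerequisite. A minimal setup sketch (not part of the original steps; assumes the vagrant user and the default key path):

# One-time setup so the ssh calls in the script run without password prompts.
# Skip ssh-keygen if ~/.ssh/id_rsa already exists.
ssh-keygen -t rsa -N "" -f ~/.ssh/id_rsa
for host in bigdata-node1 bigdata-node2 bigdata-node3
do
  ssh-copy-id vagrant@$host
done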
Set execute permission on the script:
sudo chmod +x start-spark-cluster
sudo chown vagrant:vagrant start-spark-cluster
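Since /usr/local/bin is on the PATH, the script can now be invoked by name. A quick check on bigdata-node1 afterwards might look like this (the jps output is illustrative only, not taken from the original; PIDs will differ):

start-spark-cluster
# Verify the Spark daemons on the master node
jps
# 2345 Master
# 2467 HistoryServer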
Spark cluster stop script (stop-spark-cluster)
cd /usr/local/bin
sudo vi stop-spark-cluster
The script contents are as follows:
#!/bin/bash
SPARK_HOME=/home/vagrant/modules/spark
echo "stop spark-cluster"
for i in bigdata-node1 bigdata-node2 bigdata-node3
do
  ssh $i "source /etc/profile;${SPARK_HOME}/sbin/stop-history-server.sh"
  ssh $i "source /etc/profile;${SPARK_HOME}/sbin/stop-all.sh"
done
sleep 5s
jpsx
exit 0
Set execute permission on the script:
sudo chmod +x stop-spark-cluster
sudo chown vagrant:vagrant stop-spark-cluster
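To confirm the shutdown took effect on every node, a quick check (a sketch, assuming the passwordless SSH setup above) can grep the jps output on each host:

for host in bigdata-node1 bigdata-node2 bigdata-node3
do
  echo "== $host =="
  ssh $host "source /etc/profile; jps | grep -E 'Master|Worker|HistoryServer' || echo 'no Spark daemons running'"
done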
Distribution (optional):
# Distribute to the other nodes as the root user
sudo scp -r /usr/local/bin/*-spark-cluster root@bigdata-node2:/usr/local/bin/
sudo scp -r /usr/local/bin/*-spark-cluster root@bigdata-node3:/usr/local/bin/
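Equivalently, the copy can be done in a loop and verified on each target host (a sketch; assumes root SSH access to the other nodes is already configured):

for host in bigdata-node2 bigdata-node3
do
  sudo scp /usr/local/bin/*-spark-cluster root@$host:/usr/local/bin/
  # Files copied as root stay owned by root on the target; re-run chown there if needed.
  ssh $host "ls -l /usr/local/bin/*-spark-cluster"
done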
