sparkconf.sh

#!/usr/bin/env bash
# File: sparkconf.sh
# Name: D.Saravanan
# Date: 11/09/2020
# Bash script to install and configure Spark-3.0.1 on Hadoop-3.2.1

user='raman'

# Nodes: master node, edge node, and worker nodes
mnode='172.17.0.2'
enode='172.17.0.5'
nodes='172.17.0.3 172.17.0.4'

for ip in $mnode $enode $nodes; do

ssh "$user@$ip" <<EOF

if [ "$ip" = "$mnode" ]
then
# scala installation
wget -c https://downloads.lightbend.com/scala/2.11.12/scala-2.11.12.tgz -P /home/$user/Downloads/
tar -xvzf /home/$user/Downloads/scala-2.11.12.tgz -C /home/$user/
mv /home/$user/scala-2.11.12/ /usr/local/hadoop/scala/

# spark installation
wget -c https://mirrors.estointernet.in/apache/spark/spark-3.0.1/spark-3.0.1-bin-hadoop3.2.tgz -P /home/$user/Downloads/
tar -xvzf /home/$user/Downloads/spark-3.0.1-bin-hadoop3.2.tgz -C /home/$user/
mv /home/$user/spark-3.0.1-bin-hadoop3.2/ /usr/local/hadoop/spark/

# spark-env.sh
cp /usr/local/hadoop/spark/conf/spark-env.sh.template /usr/local/hadoop/spark/conf/spark-env.sh
echo " " >> /usr/local/hadoop/spark/conf/spark-env.sh
sed -i '$ a export HADOOP_HOME=\/usr\/local\/hadoop \
\nexport HADOOP_CONF_DIR=\$HADOOP_HOME\/etc\/hadoop \
\nexport YARN_CONF_DIR=\$HADOOP_HOME\/etc\/hadoop \
\nexport SPARK_LOG_DIR=\$HADOOP_HOME\/spark\/log \
\nexport SPARK_WORKER_DIR=\$HADOOP_HOME\/spark\/work \
\nexport JAVA_HOME=\/usr\/lib\/jvm\/adoptopenjdk-8-hotspot-amd64 \
\nexport SPARK_HOME=\/usr\/local\/hadoop\/spark \
\nexport SCALA_HOME=\/usr\/local\/hadoop\/scala \
\nexport SPARK_MASTER_HOST=172.17.0.2' /usr/local/hadoop/spark/conf/spark-env.sh
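# Note (illustrative, not part of the original script): because the unquoted
# heredoc turns \$ into $ and drops \<newline>, and GNU sed expands \n in the
# appended text, spark-env.sh should end with lines equivalent to:
#   export HADOOP_HOME=/usr/local/hadoop
#   export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
#   export YARN_CONF_DIR=$HADOOP_HOME/etc/hadoop
#   export SPARK_LOG_DIR=$HADOOP_HOME/spark/log
#   export SPARK_WORKER_DIR=$HADOOP_HOME/spark/work
#   export JAVA_HOME=/usr/lib/jvm/adoptopenjdk-8-hotspot-amd64
#   export SPARK_HOME=/usr/local/hadoop/spark
#   export SCALA_HOME=/usr/local/hadoop/scala
#   export SPARK_MASTER_HOST=172.17.0.2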

# slaves
cp /usr/local/hadoop/spark/conf/slaves.template /usr/local/hadoop/spark/conf/slaves
sed -i '/localhost/s/^/#/g' /usr/local/hadoop/spark/conf/slaves
sed -i '$ a 172.17.0.3 \
\n172.17.0.4' /usr/local/hadoop/spark/conf/slaves
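# Note (illustrative): the two sed commands above should leave conf/slaves with
# "localhost" commented out and the worker IPs 172.17.0.3 and 172.17.0.4
# appended, one per line.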

# copy scala and spark to worker nodes
scp -r /usr/local/hadoop/scala/ $user@172.17.0.3:/usr/local/hadoop/scala/
scp -r /usr/local/hadoop/spark/ $user@172.17.0.3:/usr/local/hadoop/spark/
scp -r /usr/local/hadoop/scala/ $user@172.17.0.4:/usr/local/hadoop/scala/
scp -r /usr/local/hadoop/spark/ $user@172.17.0.4:/usr/local/hadoop/spark/

# copy scala and spark to edge node
scp -r /usr/local/hadoop/scala/ $user@172.17.0.5:/usr/local/hadoop/scala/
scp -r /usr/local/hadoop/spark/ $user@172.17.0.5:/usr/local/hadoop/spark/
fi

# make directory
#mkdir -p /var/log/spark
#chown -R $user:$user /var/log/spark
#mkdir -p /tmp/spark
#chown -R $user:$user /tmp/spark

# .bashrc
sed -i '$ a export SCALA_HOME=\/usr\/local\/hadoop\/scala \
\nexport PATH=\$PATH:\$SCALA_HOME\/bin \
\nexport SPARK_HOME=\/usr\/local\/hadoop\/spark \
\nexport PATH=\$PATH:\$SPARK_HOME\/bin' /home/$user/.bashrc
source ~/.bashrc
exit
EOF
done
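
# Not part of the original sparkconf.sh: a minimal sketch of how the standalone
# cluster might be started and checked from the master node (172.17.0.2) once
# this script has run, assuming the paths and SPARK_MASTER_HOST set above:
#
#   $SPARK_HOME/sbin/start-all.sh        # start the Master plus the Workers listed in conf/slaves
#   jps                                  # "Master" should appear here, "Worker" on 172.17.0.3/4
#   $SPARK_HOME/bin/spark-shell --master spark://172.17.0.2:7077
#
# The master web UI should then be reachable at http://172.17.0.2:8080.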