https://blog.csdn.net/csdn_huzeliang/article/details/78632701?utm_medium=distribute.pc_relevant_t0.none-task-blog-BlogCommendFromMachineLearnPai2-1.nonecase&depth_1-utm_source=distribute.pc_relevant_t0.none-task-blog-BlogCommendFromMachineLearnPai2-1.nonecase

    1. vim /etc/hostname
    2. // set the hostname to master, then map "<ip> master" in /etc/hosts
    3. vim /etc/hosts
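    For example, assuming the static IP configured below (192.168.66.133) and the hostname master, the two files would end up roughly like this:
    # /etc/hostname
    master
    # /etc/hosts
    127.0.0.1   localhost
    192.168.66.133   master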
    4. vim /etc/sysconfig/network-scripts/ifcfg-ens32
    5. // add the following settings (a complete example file follows step 10)
    6. BOOTPROTO="static"
    7. IPADDR=192.168.66.133
    8. NETMASK=255.255.255.0
    9. GATEWAY=192.168.66.2
    10. DNS1=192.168.66.2 // same as the gateway, see https://www.cnblogs.com/lyangfighting/p/9518726.html
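    Putting the settings together, a complete ifcfg-ens32 for this VM might look roughly as follows (TYPE/DEVICE/NAME/ONBOOT are assumed values; keep any UUID/HWADDR lines the installer generated):
    TYPE="Ethernet"
    BOOTPROTO="static"
    DEVICE="ens32"
    NAME="ens32"
    ONBOOT="yes"
    IPADDR=192.168.66.133
    NETMASK=255.255.255.0
    GATEWAY=192.168.66.2
    DNS1=192.168.66.2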
    11. service network restart // restart the network service
    12. If starting the service fails with the error below, see https://blog.csdn.net/dyw_666666/article/details/103117357
    13. Restarting network (via systemctl):
    14. Job for network.service failed because the control process exited with error code.
    15. See "systemctl status network.service" and "journalctl -xe" for details.
    16. Fix:
    17. systemctl stop NetworkManager
    18. systemctl disable NetworkManager
    19. systemctl start network.service
    20. ssh-keygen -t rsa // press Enter at every prompt
    21. ssh-copy-id master
    22. cd ~
    23. ls -al // check that the .ssh directory exists
    24. cd .ssh
    25. more authorized_keys
    26. ssh master
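    To confirm that passwordless login works, one quick check (assuming the hostname master configured above):
    ssh master 'hostname'   # should print "master" without prompting for a password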
    27. mkdir /software // directory for the installation packages
    28. tar -zxvf hadoop-2.6.0.tar.gz
    29. tar -zxvf jdk-8u202-linux-x64.tar.gz
    30. mv jdk1.8.0_202 /usr/local/jdk
    31. mv hadoop-2.6.0 /usr/local/hadoop
    32. cd /usr/local/hadoop/etc/hadoop
    33. vim hadoop-env.sh // set export JAVA_HOME=/usr/local/jdk
    34. vim core-site.xml
    35. <configuration>
    36. <property>
    37. <name>fs.default.name</name>
    38. <value>hdfs://master:9000</value>
    39. </property>
    40. <property>
    41. <name>hadoop.tmp.dir</name>
    42. <value>/usr/local/hadoop/tmp</value>
    43. </property>
    44. </configuration>
    45. vim hdfs-site.xml
    46. <configuration>
    47. <property>
    48. <name>dfs.replication</name>
    49. <value>1</value>
    50. </property>
    51. <property>
    52. <name>dfs.permissions</name>
    53. <value>false</value>
    54. </property>
    55. </configuration>
    56. cp mapred-site.xml.template mapred-site.xml
    57. vim mapred-site.xml
    58. <configuration>
    59. <property>
    60. <name>mapreduce.framework.name</name>
    61. <value>yarn</value>
    62. </property>
    63. <property>
    64. <name>mapreduce.jobhistory.address</name>
    65. <value>master:10020</value>
    66. </property>
    67. </configuration>
    68. vim yarn-site.xml
    69. <configuration>
    70. <!-- Site specific YARN configuration properties -->
    71. <property>
    72. <name>yarn.resourcemanager.hostname</name>
    73. <value>master</value>
    74. </property>
    75. <property>
    76. <name>yarn.nodemanager.aux-services</name>
    77. <value>mapreduce_shuffle</value>
    78. </property>
    79. <property>
    80. <name>mapreduce.job.ubertask.enable</name>
    81. <value>true</value>
    82. </property>
    83. </configuration>
    84. Configure the JDK and Hadoop environment variables
    85. vim /etc/profile
    86. export JAVA_HOME=/usr/local/jdk
    87. export HADOOP_HOME=/usr/local/hadoop
    88. export PATH=.:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH
    89. source /etc/profile
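    After sourcing /etc/profile, a quick sanity check that both variables are picked up (versions correspond to the packages installed above):
    java -version      # should report java version "1.8.0_202"
    hadoop version     # should report Hadoop 2.6.0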
    90. hadoop namenode -format
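    If the format succeeds, the output should end with a line roughly like the following (the directory comes from hadoop.tmp.dir set above):
    INFO common.Storage: Storage directory /usr/local/hadoop/tmp/dfs/name has been successfully formatted.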
    91. // start HDFS and YARN
    92. start-dfs.sh
    93. start-yarn.sh
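    If the daemons start correctly, jps on this single node should show roughly the following processes (PIDs will differ):
    jps
    2305 NameNode
    2421 DataNode
    2605 SecondaryNameNode
    2766 ResourceManager
    2871 NodeManager
    3142 Jps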
    94. // turn off the firewall, then visit http://ip:50070/
    95. sudo systemctl stop firewalld // stop the firewall temporarily
    96. sudo systemctl disable firewalld, then reboot // disable it permanently
    97. sudo systemctl status firewalld // check the firewall status
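    With the firewall stopped, the NameNode web UI should be reachable from the host machine (using the IP assumed above; Hadoop 2.x serves it on port 50070):
    curl -I http://192.168.66.133:50070/   # an HTTP 200 response means the web UI is up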

    Current processes (screenshot of jps output)