1. cd /home/hadoop/app/hadoop/etc/hadoop
    2. vi core-site.xml

    更改core-site.xml文件,插入以下代码

    1. <!-- 设置用户 -->
    2. <property>
    3. <name>hadoop.http.staticuser.user</name>
    4. <value>root</value>
    5. </property>
    6. <!-- 不开启权限检查 -->
    7. <property>
    8. <name>dfs.permissions.enabled</name>
    9. <value>false</value>
    10. </property>

    更改完之后就可以通过网页端上传文件了(注:dfs.permissions.enabled 属于 HDFS 配置项,通常应放在 hdfs-site.xml 而不是 core-site.xml 中,请确认配置是否生效)

    1. vi hdfs-site.xml

    插入以下代码(注:dfs.permissions 是旧版属性名,新版 Hadoop 中建议使用 dfs.permissions.enabled)

    1. <property>
    2. <name>dfs.permissions</name>
    3. <value>false</value>
    4. </property>

    前期准备工作已经完成,启动 Hadoop 服务
    先把必做环节完成了
    实验三必完成环节.pdf
    截图发给老师
    image.png
    接下来是选做环节
    实验三选做环节.docx
    按照第二种配置IDEA
    创建MAVEN项目
    image.png
    直接next
    pom.xml文件

    1. import org.apache.hadoop.conf.Configuration;
    2. import org.apache.hadoop.fs.*;
    3. import javax.security.auth.login.AppConfigurationEntry;
    4. import java.io.IOException;
    5. import java.io.PrintStream;
    6. import java.net.URI;
    7. import java.net.URL;
    8. public class demo {
    9. }
    10. class MyPathFilter implements PathFilter{
    11. String reg = null;
    12. MyPathFilter(String reg){
    13. this.reg = reg;
    14. }
    15. @Override
    16. public boolean accept(Path path) {
    17. if (!(path.toString().matches(reg))){
    18. return true;
    19. }
    20. return false;
    21. }
    22. }
    23. class MergeFile{
    24. Path inputPath = null;
    25. Path outputPath = null;
    26. public MergeFile(String inputPath,String outputPath){
    27. this.inputPath = new Path(inputPath);
    28. this.outputPath = new Path(outputPath);
    29. }
    30. public void doMerge() throws IOException{
    31. Configuration conf = new Configuration();
    32. conf.set("fs.defaultFS","hdfs://192.168.255.123:9000");
    33. conf.set("fs.hdfs.impl","org.apache.hadoop.hdfs.DistributedFileSystem");
    34. FileSystem fsSource = FileSystem.get(URI.create(inputPath.toString()),conf);
    35. FileSystem fsDst = FileSystem.get(URI.create(outputPath.toString()),conf);
    36. FileStatus[] sourceStatus = fsSource.listStatus(inputPath,new MyPathFilter(".*\\.abc"));
    37. FSDataOutputStream fsdos = fsDst.create(outputPath);
    38. PrintStream ps = new PrintStream(System.out);
    39. for (FileStatus sta:
    40. sourceStatus) {
    41. System.out.println("路径:"+sta.getPath()+"文件大小:"+sta.getLen()+"权限:"+sta.getPermission()+"内容:");
    42. FSDataInputStream fsdis = fsSource.open(sta.getPath());
    43. byte[] data = new byte[1024];
    44. int read = -1;
    45. while ((read = fsdis.read(data))>0){
    46. ps.write(data,0,read);
    47. fsdos.write(data,0,read);
    48. }
    49. fsdis.close();
    50. }
    51. ps.close();
    52. fsdos.close();
    53. }
    54. public static void main(String[] args) throws IOException {
    55. MergeFile merge = new MergeFile("hdfs://192.168.255.123:9000/user/hadoop","hdfs://192.168.255.123:9000/user/hadoop/merge.txt");
    56. merge.doMerge();
    57. }
    58. }

    运行就完了
    image.png
    image.png
    看到有merge.txt文件