[01] Demo.java

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import cn.aigamejxb.hadoop.hdfs.conf.Conf;
  12. public class Demo {
  13. public static void main(String[] args) throws IOException, URISyntaxException {
  14. URI uri = new URI(Conf.uri);
  15. FileSystem fs = FileSystem.get(uri, new Configuration());
  16. //做读取文件操作时,把这边的注释打开,下面全部注释掉
  17. //因为上面执行完后FileSystem对象会被回收,下面执行会报错
  18. // String content = readContent(fs, "/mz/xyj.txt");
  19. // if (content != null) {
  20. // System.out.println(content);
  21. // }
  22. //做上传实验时,把这个注释打开,上面注释掉
  23. //如果上面不注释,上面执行完后FileSystem对象会被回收,这里执行会报错
  24. System.out.println("上传一个文件");
  25. upload(fs, "./Me.txt", "/mz");//注意修改成自己需要的参数:1:被上传的路径(电脑上的);2:目标路径(hadoop的目录路径)
  26. }
  27. public static String readContent(FileSystem fs, String filepath) throws IOException {
  28. Path path = new Path(filepath);
  29. if (!fs.exists(path)) {
  30. return null;
  31. } else {
  32. String partContent = null;
  33. StringBuilder strBuilder = new StringBuilder();
  34. FSDataInputStream fsInputStream = null;
  35. try {
  36. fsInputStream = fs.open(path);
  37. BufferedReader bfReader = new BufferedReader(new InputStreamReader(fsInputStream));
  38. long perTime = System.currentTimeMillis();
  39. while ((partContent = bfReader.readLine()) != null) {
  40. strBuilder.append(partContent + "\n");
  41. }
  42. long curTime = System.currentTimeMillis();
  43. } catch (IOException e) {
  44. } finally {
  45. fs.close();
  46. }
  47. return strBuilder.toString();
  48. }
  49. }
  50. public static void upload(FileSystem fs, String localFile, String hdfsPath) throws IOException {
  51. Path src = new Path(localFile);// 要上传的
  52. Path dst = new Path(hdfsPath);// 目的路径
  53. fs.copyFromLocalFile(src, dst);
  54. System.out.println("upload to " + fs.getConf().get("fs.default.name"));// 默认配置文件的名称(自己new的一个空壳子config)
  55. FileStatus files[] = fs.listStatus(dst);// 目的地址的所有文件状态
  56. for (FileStatus file : files) {
  57. System.out.println(file.getPath());
  58. }
  59. }
  60. }

[02] log4j.properties

简便起见log4j的配置我也贴一下:

  1. # Log level priority: debug < info < warn < error
  2. # log4j 1.x cannot route each level to its own file directly; the per-appender Threshold settings below approximate it
  3. log4j.rootLogger=debug,stdout,info,debug,warn,error
  4. # Console appender (stdout)
  5. log4j.appender.stdout=org.apache.log4j.ConsoleAppender
  6. log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
  7. log4j.appender.stdout.layout.ConversionPattern= [%d{yyyy-MM-dd HH:mm:ss a}]:%p %l%m%n
  8. # INFO log file: daily-rolling appender, keeps messages at INFO level and above
  9. log4j.logger.info=info
  10. log4j.appender.info=org.apache.log4j.DailyRollingFileAppender
  11. log4j.appender.info.DatePattern='_'yyyy-MM-dd'.log'
  12. log4j.appender.info.File=./src/cn/aigamejxb/hadoop/hdfs/log/info.log
  13. log4j.appender.info.Append=true
  14. log4j.appender.info.Threshold=INFO
  15. log4j.appender.info.layout=org.apache.log4j.PatternLayout
  16. log4j.appender.info.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss a} [Thread: %t][ Class:%c >> Method: %l ]%n%p:%m%n
  17. # DEBUG log file: daily-rolling appender, keeps messages at DEBUG level and above
  18. log4j.logger.debug=debug
  19. log4j.appender.debug=org.apache.log4j.DailyRollingFileAppender
  20. log4j.appender.debug.DatePattern='_'yyyy-MM-dd'.log'
  21. log4j.appender.debug.File=./src/cn/aigamejxb/hadoop/hdfs/log/debug.log
  22. log4j.appender.debug.Append=true
  23. log4j.appender.debug.Threshold=DEBUG
  24. log4j.appender.debug.layout=org.apache.log4j.PatternLayout
  25. log4j.appender.debug.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss a} [Thread: %t][ Class:%c >> Method: %l ]%n%p:%m%n
  26. # WARN log file: daily-rolling appender, keeps messages at WARN level and above
  27. log4j.logger.warn=warn
  28. log4j.appender.warn=org.apache.log4j.DailyRollingFileAppender
  29. log4j.appender.warn.DatePattern='_'yyyy-MM-dd'.log'
  30. log4j.appender.warn.File=./src/cn/aigamejxb/hadoop/hdfs/log/warn.log
  31. log4j.appender.warn.Append=true
  32. log4j.appender.warn.Threshold=WARN
  33. log4j.appender.warn.layout=org.apache.log4j.PatternLayout
  34. log4j.appender.warn.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss a} [Thread: %t][ Class:%c >> Method: %l ]%n%p:%m%n
  35. # ERROR log file: daily-rolling appender, keeps messages at ERROR level only
  36. log4j.logger.error=error
  37. log4j.appender.error = org.apache.log4j.DailyRollingFileAppender
  38. log4j.appender.error.DatePattern='_'yyyy-MM-dd'.log'
  39. log4j.appender.error.File = ./src/cn/aigamejxb/hadoop/hdfs/log/error.log
  40. log4j.appender.error.Append = true
  41. log4j.appender.error.Threshold = ERROR
  42. log4j.appender.error.layout = org.apache.log4j.PatternLayout
  43. log4j.appender.error.layout.ConversionPattern = %d{yyyy-MM-dd HH:mm:ss a} [Thread: %t][ Class:%c >> Method: %l ]%n%p:%m%n

[03] Permission denied 报错解决方案

16.HDFS文件上传与读取Demo.java代码和log4j配置 - 图1

  • 添加系统变量

    1. HADOOP_USER_NAME=root
    2. #变量值根据自己账户情况而定,比如我的linux账户为root。

    16.HDFS文件上传与读取Demo.java代码和log4j配置 - 图2

  • 重启Eclipse,再跑一次

16.HDFS文件上传与读取Demo.java代码和log4j配置 - 图3