aa.png
    cmd 脚本用来在 Windows 上运行。
    sh 脚本用来在 Linux 上运行;以后提到 Linux 的 shell 脚本,指的就是 .sh 文件。
    aa.png
    aa.png

    1. package com.moon.utils;
    2. import java.io.FileOutputStream;
    3. import java.io.InputStream;
    4. import org.apache.hadoop.conf.Configuration;
    5. import org.apache.hadoop.fs.FSDataInputStream;
    6. import org.apache.hadoop.fs.FSDataOutputStream;
    7. import org.apache.hadoop.fs.FileStatus;
    8. import org.apache.hadoop.fs.FileSystem;
    9. import org.apache.hadoop.fs.Path;
    10. public class HdfsUtils {
    11. private static Configuration conf;
    12. static {
    13. conf = new Configuration();
    14. conf.set("fs.defaultFS", "hdfs://127.0.0.1:9000");
    15. }
    16. // 查看文件列表
    17. public static void ls(String path) {
    18. try {
    19. FileSystem fs = FileSystem.get(conf);
    20. FileStatus[] arr = fs.listStatus(new Path(path));
    21. for (FileStatus f : arr) {
    22. System.out.println(f.getPath().getName());
    23. }
    24. fs.close();
    25. } catch (Exception e) {
    26. e.printStackTrace();
    27. }
    28. }
    29. // 创建目录
    30. public static void mkdir(String path) {
    31. try {
    32. FileSystem fs = FileSystem.get(conf);
    33. fs.mkdirs(new Path(path));
    34. System.out.println("创建目录成功");
    35. fs.close();
    36. } catch (Exception e) {
    37. e.printStackTrace();
    38. }
    39. }
    40. // 删除
    41. public static void delete(String path) {
    42. try {
    43. FileSystem fs = FileSystem.get(conf);
    44. fs.delete(new Path(path));
    45. System.out.println("删除成功");
    46. fs.close();
    47. } catch (Exception e) {
    48. e.printStackTrace();
    49. }
    50. }
    51. // ntfs->hdfs 要求传本地路径和云端路径。在真实开发中是不适合的
    52. public static void uploadToHdfs(String localPath, String hdfsPath) {
    53. try {
    54. FileSystem fs = FileSystem.get(conf);
    55. fs.copyFromLocalFile(new Path(localPath), new Path(hdfsPath));
    56. System.out.println("上传成功");
    57. fs.close();
    58. } catch (Exception e) {
    59. e.printStackTrace();
    60. }
    61. }
    62. // ntfs->hdfs uploadToHdfs改良版
    63. public static void uploadToHdfs(InputStream inputStream, String hdfsPath) {
    64. try {
    65. FileSystem fs = FileSystem.get(conf);
    66. FSDataOutputStream outputStream = fs.create(new Path(hdfsPath));
    67. byte[] buf = new byte[1024];
    68. int len;
    69. while ((len = inputStream.read(buf)) != -1) {
    70. outputStream.write(buf, 0, len);
    71. outputStream.flush();
    72. }
    73. System.out.println("上传成功");
    74. outputStream.close();
    75. inputStream.close();
    76. fs.close();
    77. } catch (Exception e) {
    78. e.printStackTrace();
    79. }
    80. }
    81. // hdfs->ntfs
    82. public static void downloadTolocal(String hdfsPath, String localPath) {
    83. try {
    84. FileSystem fs = FileSystem.get(conf);
    85. FSDataInputStream dataInputStream = fs.open(new Path(hdfsPath));
    86. FileOutputStream outputStream = new FileOutputStream(localPath);
    87. byte[] buf = new byte[1024];
    88. int len;
    89. while ((len = dataInputStream.read(buf)) != -1) {
    90. outputStream.write(buf, 0, len);
    91. outputStream.flush();
    92. }
    93. System.out.println("下载成功");
    94. outputStream.close();
    95. dataInputStream.close();
    96. fs.close();
    97. } catch (Exception e) {
    98. e.printStackTrace();
    99. }
    100. }
    101. // 测试主函数
    102. public static void main(String[] args) {
    103. //
    104. }
    105. }

    你们课下如果有人去做这个实验,记得我中午发的压缩包一定要解压到 C:/hadoop 路径,然后配置好环境变量,再格式化 NameNode,最后启动服务。