Maven配置

  1. <dependencies>
  2. <dependency>
  3. <groupId>org.apache.hadoop</groupId>
  4. <artifactId>hadoop-client</artifactId>
  5. <version>2.7.3</version>
  6. </dependency>
  7. </dependencies>

创建文件夹

  1. Configuration config = new Configuration();
  2. try {
  3. FileSystem fs = FileSystem.get(new URI("hdfs://localhost:9000"),config);
  4. boolean result = fs.mkdirs(new Path("/api/test"));
  5. System.out.println(result);
  6. } catch (Exception e) {
  7. e.printStackTrace();
  8. }

读取文件

  1. FSDataInputStream in = fileSystem.open(new Path("/hadoop.txt"));
  2. IOUtils.copyBytes(in,System.out,1024);

创建文件

  1. FSDataOutputStream out = fileSystem.create(new Path("/hello.txt"));
  2. out.writeUTF("hello world");
  3. out.flush();
  4. out.close();

重命名文件

  1. fileSystem.rename(new Path("/hello.txt"),new Path("/new.txt"));

拷贝本地文件到hdfs

  1. fileSystem.copyFromLocalFile(new Path("./pom.xml"),new Path("/pom.xml"));

下载hdfs文件到本地

  1. fileSystem.copyToLocalFile(new Path("/hadoop.txt"),new Path("hadoop.txt"));

获取文件列表

  1. FileStatus[] fileStatus = fileSystem.listStatus(new Path("/"));
  2. for(FileStatus s: fileStatus){
  3. System.out.println(s.getPath().toString());
  4. }

递归获取文件

  1. RemoteIterator<LocatedFileStatus> fileStatus = fileSystem.listFiles(new Path("/"),true);
  2. while (fileStatus.hasNext()){
  3. System.out.println(fileStatus.next().getPath().toString());
  4. }

删除文件

  1. fileSystem.delete(new Path("/new.txt"),true);