linux<—>HDFS
HDFS之间
与linux基本相同
-setrep:设置HDFS中文件的副本数量
[atguigu@hadoop102 hadoop-3.1.3]$ hadoop fs -setrep 10 /sanguo/shuguo/kongming.txt
HDFS—>client下载
hadoop fs -getmerge / -get（= -copyToLocal） hdfspath linuxpath
Client—>HDFS上传
hadoop fs -appendToFile / -put（= -copyFromLocal） / -moveFromLocal linuxpath hdfspath
查看hdfs路径
hdfs getconf -confKey fs.defaultFS（注：fs.default.name 已废弃，新属性名为 fs.defaultFS）
IDEA(API)<—>HDFS
创建maven工程
设置依赖
POM文件:
<dependencies><dependency><groupId>junit</groupId><artifactId>junit</artifactId><version>4.12</version></dependency><dependency><groupId>org.apache.logging.log4j</groupId><artifactId>log4j-slf4j-impl</artifactId><version>2.12.0</version></dependency><dependency><groupId>org.apache.hadoop</groupId><artifactId>hadoop-client</artifactId><version>3.1.3</version></dependency></dependencies>
设置配置文件
在项目的src/main/resources目录下,新建一个文件,命名为“log4j2.xml”,在文件中填入:
<?xml version="1.0" encoding="UTF-8"?><Configuration status="error" strict="true" name="XMLConfig"><Appenders><!-- 类型名为Console,名称为必须属性 --><Appender type="Console" name="STDOUT"><!-- 布局为PatternLayout的方式,输出样式为[INFO] [2018-01-22 17:34:01][org.test.Console]I'm here --><Layout type="PatternLayout" pattern="[%p] [%d{yyyy-MM-dd HH:mm:ss}][%c{10}]%m%n" /></Appender></Appenders><Loggers><!-- 可加性为false --><Logger name="test" level="info" additivity="false"><AppenderRef ref="STDOUT" /></Logger><!-- root loggerConfig设置 --><Root level="info"><AppenderRef ref="STDOUT" /></Root></Loggers></Configuration>
代码思路
- 不创建流:
创建文件系统:
操作:文件系统调用方法进行操作
关闭资源
- 创建流:
1.创建文件系统
2.
创建输入流
创建输出流
调用hadoop包的io流工具类IOUtil
3.关闭资源
API操作:
上传、下载、文件夹删除、文件名更改(移动文件夹)、文件详情查看、文件和文件夹判断
public class HDFSClient {FileSystem fs = null;@Before//在操作执行前执行public void before() throws URISyntaxException, IOException, InterruptedException {//创建连接对象Configuration configuration = new Configuration();configuration.set("dfs.replication","2");fs = FileSystem.get(new URI("hdfs://hadoop102:9820"),configuration,"atguigu");}@After//在操作执行后执行public void after() throws IOException {//关闭资源if (fs != null) {fs.close();}}@Testpublic void upload() throws IOException {fs.copyFromLocalFile(false,true,new Path("C:\\Users\\ldc\\AppData\\Roaming\\feiq\\Recv Files\\时间同步问题.txt"),new Path("\\"));}@Testpublic void download() throws IOException {fs.copyToLocalFile(true,new Path("\\时间同步问题.txt"),new Path("C:\\Users\\ldc\\AppData\\Roaming\\feiq\\Recv Files"));}@Testpublic void delete() throws IOException {fs.delete(new Path("\\mybash"),true);}@Testpublic void rename() throws IOException {//修改名字// fs.rename(new Path("/happy"), new Path("/nothappy"));//移动文件fs.rename(new Path("/happy/jdk-8u212-linux-x64.tar.gz"),new Path("/good/hh"));System.out.println("成功移动文件");}/** @Author ldc* @Description //TODO 生成目录* @Date 15:58 2021/4/12* @Param []* @return void*/@Testpublic void mkdir() throws IOException {fs.mkdirs(new Path("/good"));}/** @Author ldc* @Description //TODO 查看文件详情,只能是文件,不能是目录!* @Date 16:01 2021/4/12* @Param []* @return void*/@Testpublic void list() throws IOException {//recursive 是找到指定目录下的所有文件,listfiles返回的是迭代器,功能和liststatus差不多RemoteIterator<LocatedFileStatus> lsInterator = fs.listFiles(new Path("\\"), false);while (lsInterator.hasNext()) {LocatedFileStatus next = lsInterator.next();System.out.println("文件名是:"+ next.getPath().getName());System.out.println("文件所属群是:"+ next.getGroup());System.out.println("文件所属主是:"+ next.getOwner());System.out.println("文件块的位置是:"+ Arrays.toString(next.getBlockLocations()));if (next.isDirectory()) {System.out.println("这个是目录");} else if (next.isFile()) {System.out.println("这个是文件");}System.out.println("===========================================");}}@Testpublic 
void touch() throws IOException {fs.createNewFile(new Path("/a.txt"));}@Testpublic void fileType() throws IOException {//返回的指定路径的所有文件以及文件夹,返回的是数组,功能和listfiles差不多FileStatus[] fileStatuses = fs.listStatus(new Path("\\"));for (FileStatus fileStatus : fileStatuses) {if (fileStatus.isFile()) {System.out.println(fileStatus.getPath().getName()+"是文件");} else {System.out.println(fileStatus.getPath().getName()+"是目录");}}}//io流@Testpublic void uploadIO() throws IOException {FileInputStream fis = new FileInputStream("C:\\Users\\ldc\\AppData\\Roaming\\feiq\\Recv Files\\尚硅谷大数据技术之Hadoop(入门)V3.0.docx");FSDataOutputStream fds = fs.create(new Path("\\nothappy\\尚硅谷大数据技术之Hadoop(入门)V3.0.docx"));//引入io流工具类IOUtils.copyBytes(fis,fds,1024,true);}@Testpublic void downloadIO() throws IOException {FSDataInputStream fis = fs.open(new Path("\\nothappy\\尚硅谷大数据技术之Hadoop(入门)V3.0.docx"));FileOutputStream fos = new FileOutputStream("C:\\ldc_zoom\\ShangBigDatas\\03-BigData\\04-hadoop\\04-hadoop\\4.视频\\尚硅谷大数据技术之Hadoop(入门)V3.0.docx");IOUtils.copyBytes(fis,fos,1024,true);}}
