package com.hadoop.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsPermission;
import org.junit.Test;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
public class HdfsClient {

    /** NameNode RPC endpoint shared by all test methods. */
    private static final String HDFS_URI = "hdfs://hadoop102:8020";

    /** User the client connects as (must have permissions on the cluster). */
    private static final String HDFS_USER = "hadoop";

    public static void main(String[] args) {
        System.out.println("main");
    }

    /**
     * Opens a FileSystem handle against the test cluster.
     *
     * @return a connected {@link FileSystem}; the caller is responsible for closing it
     * @throws IOException            if the connection to the NameNode fails
     * @throws URISyntaxException     if {@link #HDFS_URI} is malformed
     * @throws InterruptedException   if the connecting thread is interrupted
     */
    private static FileSystem getFileSystem()
            throws IOException, URISyntaxException, InterruptedException {
        return FileSystem.get(new URI(HDFS_URI), new Configuration(), HDFS_USER);
    }

    /** Creates a directory on HDFS. */
    @Test
    public void testMkdirs() throws IOException, URISyntaxException, InterruptedException {
        // try-with-resources guarantees the FileSystem is closed even if mkdirs throws
        try (FileSystem fs = getFileSystem()) {
            System.out.println(HDFS_URI);
            fs.mkdirs(new Path("/xiyou/huaguoshan/"));
        }
    }

    /** Recursively lists file details (permissions, owner, size, block locations) under /xiyou/. */
    @Test
    public void testListFiles() throws URISyntaxException, IOException, InterruptedException {
        try (FileSystem fs = getFileSystem()) {
            // recursive = true: descend into subdirectories; listFiles returns files only
            RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path("/xiyou/"), true);
            while (files.hasNext()) {
                LocatedFileStatus fileStatus = files.next();
                System.out.println("========" + fileStatus.getPath() + "=========");
                System.out.println(fileStatus.getPath().getName());
                System.out.println(fileStatus.getPermission());
                System.out.println(fileStatus.getOwner());
                System.out.println(fileStatus.getGroup());
                System.out.println("getLen:" + fileStatus.getLen());
                System.out.println(fileStatus.getModificationTime());
                System.out.println(fileStatus.getReplication());
                System.out.println(fileStatus.getBlockSize());
                // Block locations: where each HDFS block of this file is stored
                BlockLocation[] blockLocations = fileStatus.getBlockLocations();
                System.out.println(Arrays.toString(blockLocations));
            }
        }
    }

    /** Lists direct children of /xiyou/ and reports whether each is a file or a directory. */
    @Test
    public void testListStatus() throws URISyntaxException, IOException, InterruptedException {
        try (FileSystem fs = getFileSystem()) {
            // listStatus is non-recursive: immediate children only, both files and directories
            for (FileStatus fileStatus : fs.listStatus(new Path("/xiyou/"))) {
                if (fileStatus.isFile()) {
                    System.out.println("文件f:" + fileStatus.getPath().getName());
                } else {
                    System.out.println("路径d:" + fileStatus.getPath().getName());
                }
            }
        }
    }
}