This experiment runs on Windows, working in IDEA against the Windows Hadoop native dependencies.
First, download the Hadoop native binaries built for Windows.
The download link:
winutils-1: winutils.exe hadoop.dll and hdfs.dll binaries for hadoop windows - Gitee.com
After extracting the archive, you also need to configure the environment variables: typically a HADOOP_HOME variable pointing at the extracted directory, with %HADOOP_HOME%\bin appended to Path.
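If editing system environment variables is inconvenient, the same hint can be given programmatically: Hadoop's native-binary lookup also honors the hadoop.home.dir system property. A minimal sketch, assuming a hypothetical extraction path D:\hadoop-3.2.3 (adjust to your own):
public class HadoopHomeCheck {
    public static void main(String[] args) {
        // hadoop.home.dir serves as an in-process alternative to the
        // HADOOP_HOME environment variable; it must be set before the
        // first FileSystem call. The path below is hypothetical.
        System.setProperty("hadoop.home.dir", "D:\\hadoop-3.2.3");
        System.out.println("HADOOP_HOME env: " + System.getenv("HADOOP_HOME"));
        System.out.println("hadoop.home.dir: " + System.getProperty("hadoop.home.dir"));
    }
}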
Now you can call the Hadoop API from IDEA through Maven. The project structure (from the files shown below) is: pom.xml at the project root, log4j.properties under the resources directory, and HdfsClient.java in the com.atguigu.hdfs package.
Below are the pom.xml dependencies:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.atguigu</groupId>
    <artifactId>HDFSClient</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>3.2.3</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.13.2</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.36</version>
        </dependency>
    </dependencies>
</project>
Below is the log4j.properties configuration:
log4j.rootLogger=INFO, stdout, logfile
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/spring.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n
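To confirm that logging is wired up before touching HDFS, a throwaway check like the following can help. This is a minimal sketch, and the class name LogCheck is made up for illustration:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogCheck {
    private static final Logger LOG = LoggerFactory.getLogger(LogCheck.class);

    public static void main(String[] args) {
        // With slf4j-log4j12 on the classpath, this message is routed to the
        // appenders configured in log4j.properties.
        LOG.info("log4j is wired up");
    }
}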
Below is HdfsClient.java, the source that exercises the Hadoop API. Be careful to import from the right packages here: Path, for example, must be org.apache.hadoop.fs.Path rather than java.nio.file.Path.
package com.atguigu.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.*;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;

public class HdfsClient {

    private FileSystem fs;

    @Before
    public void init() throws URISyntaxException, IOException, InterruptedException {
        // Address of the cluster's NameNode
        URI uri = new URI("hdfs://hadoop102:8020");
        // Create a configuration object
        Configuration configuration = new Configuration();
        configuration.set("dfs.replication", "2");
        // User to act as
        String user = "hadoop";
        // Obtain the client object
        fs = FileSystem.get(uri, configuration, user);
    }

    @After
    public void close() throws IOException {
        // Release resources
        fs.close();
    }

    @Test
    public void testMkdir() throws IOException {
        // Create a directory
        fs.mkdirs(new Path("/xiyou/huaguoshan1"));
    }

    /**
     * Configuration priority, lowest to highest (see the sketch after this class):
     * hdfs-default.xml => hdfs-site.xml => config files in the project's resources directory => values set in code
     *
     * @throws IOException
     */
    // Upload
    @Test
    public void testPut() throws IOException {
        // Args: delete source? = false, overwrite existing? = true, local source, HDFS destination
        fs.copyFromLocalFile(false, true, new Path("D:\\sunwukong.txt"), new Path("hdfs://hadoop102/xiyou/huaguoshan"));
    }

    // Download
    @Test
    public void testGet() throws IOException {
        // Args: delete source? = true, HDFS source, local destination,
        // use raw local file system? = true (skips writing a local .crc checksum file)
        fs.copyToLocalFile(true, new Path("hdfs://hadoop102/xiyou/huaguoshan"), new Path("D:\\"), true);
    }

    // Delete
    @Test
    public void testRm() throws IOException {
        // Delete a file
        // fs.delete(new Path("/jdk-8u321-linux-x64.tar.gz"), false);
        // Delete an empty directory
        // fs.delete(new Path("/xiyou"), false);
        // Delete a non-empty directory (recursive = true)
        fs.delete(new Path("/jinguo"), true);
    }

    // Rename and move files
    @Test
    public void testMv() throws IOException {
        // Rename a file
        // fs.rename(new Path("/input/word.txt"), new Path("/input/ss.txt"));
        // Move a file
        // fs.rename(new Path("/input/ss.txt"), new Path("/cls.txt"));
        // Rename a directory
        fs.rename(new Path("/input"), new Path("/output"));
    }

    // Print file details
    @Test
    public void fileDetail() throws IOException {
        // List all files, recursively
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
        // Iterate over the files
        while (listFiles.hasNext()) {
            LocatedFileStatus fileStatus = listFiles.next();
            System.out.println("========" + fileStatus.getPath() + "========");
            System.out.println(fileStatus.getPermission());
            System.out.println(fileStatus.getOwner());
            System.out.println(fileStatus.getGroup());
            System.out.println(fileStatus.getLen());
            System.out.println(fileStatus.getModificationTime());
            System.out.println(fileStatus.getReplication());
            System.out.println(fileStatus.getBlockSize());
            System.out.println(fileStatus.getPath().getName());
            // Block locations: each entry is (offset, length, hosts),
            // e.g. 128 MB read from offset 0, another 128 MB from offset 134217728:
            // [0,134217728,hadoop103,hadoop104,hadoop102, 134217728,134217728,hadoop104,hadoop103,hadoop102, 268435456,134217728,hadoop104,hadoop103,hadoop102, 402653184,89588777,hadoop104,hadoop103,hadoop102]
            BlockLocation[] blockLocations = fileStatus.getBlockLocations();
            System.out.println(Arrays.toString(blockLocations));
        }
    }

    // Distinguish files from directories
    @Test
    public void testFile() throws IOException {
        FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
        for (FileStatus status : fileStatuses) {
            if (status.isFile()) {
                System.out.println("File: " + status.getPath().getName());
            } else {
                System.out.println("Directory: " + status.getPath().getName());
            }
        }
    }
}
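The configuration precedence noted in the Javadoc above can be observed directly. A minimal sketch, assuming hadoop-client is on the classpath; the class name ConfPriorityDemo is made up, and an hdfs-site.xml under the resources directory is optional:
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;

public class ConfPriorityDemo {
    public static void main(String[] args) {
        // HdfsConfiguration forces hdfs-default.xml (plus any hdfs-site.xml
        // found on the classpath) to be loaded into the Configuration.
        Configuration conf = new HdfsConfiguration();
        // Value coming from the XML layers:
        System.out.println("from XML layers: " + conf.get("dfs.replication"));
        // A value set in code overrides every XML layer:
        conf.set("dfs.replication", "2");
        System.out.println("after conf.set : " + conf.get("dfs.replication"));
    }
}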