1.配置Hadoop的Windows客户端
2.新建Maven项目[略]
3.添加依赖
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>3.3.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/junit/junit -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-reload4j -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-reload4j</artifactId>
<version>2.0.9</version>
</dependency>
4.在项目的 src/main/resources目录下,新建日志配置文件,文件名为“log4j.properties”
log4j.rootLogger=INFO, stdout, logfile
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/spring.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n
5.新建测试类,用于在Hadoop中创建一个文件夹
package cn.coreqi.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
/**
 * HDFS client demo. Typical client workflow:
 * <ol>
 *   <li>obtain a client object,</li>
 *   <li>execute the desired operations,</li>
 *   <li>release the resources.</li>
 * </ol>
 */
public class HdfsClient {
    /**
     * HDFS client handle; created in {@link #init()}, released in {@link #close()}.
     */
    private FileSystem fs;

    /**
     * Initializes the HDFS client before each test.
     *
     * @throws IOException          if the file system cannot be obtained
     * @throws InterruptedException if the connection attempt is interrupted
     * @throws URISyntaxException   if the NameNode URI is malformed
     */
    @Before
    public void init() throws IOException, InterruptedException, URISyntaxException {
        // Address of the Hadoop NameNode to connect to (default RPC port 8020).
        URI uri = new URI("hdfs://192.168.58.130:8020");
        // Client-side configuration; empty here, so defaults plus any *-site.xml on the classpath apply.
        Configuration configuration = new Configuration();
        // Remote user to act as on the cluster.
        String user = "root";
        // Obtain the client object.
        fs = FileSystem.get(uri, configuration, user);
    }

    /**
     * Releases the client after each test.
     * Null-safe: JUnit runs {@code @After} even when {@link #init()} threw,
     * in which case {@code fs} is still null and closing it would raise a
     * NullPointerException that masks the original failure.
     *
     * @throws IOException if closing the file system fails
     */
    @After
    public void close() throws IOException {
        if (fs != null) {
            fs.close();
        }
    }

    /**
     * Creates the directory {@code /coreqi} on HDFS.
     *
     * @throws IOException if the directory cannot be created
     */
    @Test
    public void testMkdirs() throws IOException {
        fs.mkdirs(new Path("/coreqi"));
    }
}
标签:HDFS,Java,Hadoop,throws,import,apache,org,log4j,客户端
From: https://www.cnblogs.com/fanqisoft/p/17892815.html