package com.qm.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
public class HdfsClient {

    private FileSystem fs;

    @Before
    public void init() throws URISyntaxException, IOException, InterruptedException {
        URI uri = new URI("hdfs://hadoop102:8020");
        Configuration configuration = new Configuration();
        String user = "qm";
        fs = FileSystem.get(uri, configuration, user);
    }

    @After
    public void close() throws IOException {
        fs.close();
    }

    // Create a directory
    @Test
    public void testMkdir() throws IOException {
        // Create the folder on HDFS
        fs.mkdirs(new Path("/xiyou/huaguoshan"));
    }

    // Upload
    @Test
    public void testPut() throws IOException {
        // Parameters:
        // 1: whether to delete the local source file after copying;
        // 2: whether to overwrite the destination if it already exists;
        // 3: source (local) path;
        // 4: destination (HDFS) path
        fs.copyFromLocalFile(false, false,
                new Path("/Users/qmmm666666/Desktop/DEPT.txt"),
                new Path("hdfs://hadoop102/xiyou/huaguoshan"));
    }
}
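
A download test is the natural counterpart to testPut. Below is a minimal sketch of a method that could be added inside HdfsClient, reusing the fs field set up in init(); the HDFS source and local destination paths are illustrative assumptions, not from the original post.

// Download: copy a file from HDFS back to the local filesystem
@Test
public void testGet() throws IOException {
    // Parameters:
    // 1: whether to delete the HDFS source after copying;
    // 2: source (HDFS) path (assumes testPut already uploaded DEPT.txt);
    // 3: destination (local) path;
    // 4: useRawLocalFileSystem -- true skips writing a local .crc checksum file
    fs.copyToLocalFile(false,
            new Path("/xiyou/huaguoshan/DEPT.txt"),
            new Path("/Users/qmmm666666/Desktop/"),
            true);
}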
Problem:
A recent HDFS error: previewing a file in the NameNode web UI fails with "Couldn't preview the file."

Causes:
- hdfs-site.xml is misconfigured (WebHDFS, which the preview feature relies on, is not enabled)
- The Hadoop host mappings in the Mac's /etc/hosts and in Linux's /etc/hosts are inconsistent, so the browser cannot resolve the DataNode hostname the preview redirects to
Solutions:
- Add the following property to hdfs-site.xml (then restart HDFS for it to take effect):

<property>
    <name>dfs.webhdfs.enabled</name>
    <value>true</value>
</property>

- Make the Hadoop host mappings in the Mac's /etc/hosts match those in Linux's /etc/hosts; a read test to verify the fix follows the listings below.
Mine are:
Mac's /etc/hosts:
192.168.164.1 macbook
192.168.164.102 qm102
192.168.164.103 qm103
192.168.164.104 qm104
Linux's /etc/hosts:
192.168.164.1 macbook
192.168.164.102 qm102
192.168.164.103 qm103
192.168.164.104 qm104
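
With both fixes in place, a quick end-to-end check from the Mac is to stream the uploaded file back through the same JUnit client; if dfs.client.use.datanode.hostname is set to true (an assumption about this setup, common when the client sits outside the cluster's network), this also exercises the /etc/hosts mappings above. A minimal sketch of an extra test method for HdfsClient; it additionally needs imports for org.apache.hadoop.fs.FSDataInputStream and org.apache.hadoop.io.IOUtils, and the path assumes testPut already ran.

// Read the uploaded file back and print it to stdout
@Test
public void testRead() throws IOException {
    FSDataInputStream in = fs.open(new Path("/xiyou/huaguoshan/DEPT.txt"));
    try {
        // 4096-byte buffer; false = leave System.out open after the copy
        IOUtils.copyBytes(in, System.out, 4096, false);
    } finally {
        IOUtils.closeStream(in);
    }
}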
Problem solved.