- 查看主机名与 IP 的映射（/etc/hosts）
[root@master dfs]# cat /etc/hosts
192.168.128.78 hadoop01
- 查看core-site.xml
<property>
<name>fs.defaultFS</name>
<value>hdfs://hadoop01:9000</value>
</property>
- 删除文件夹
# 先停止hadoop
stop-dfs.sh
# 删除data和name
[root@master dfs]# pwd
/home/software/hadoop-3.2.1/tmp/dfs
[root@master dfs]# ls
data name namesecondary
[root@master dfs]# rm -rf data
[root@master dfs]# rm -rf name
[root@master dfs]# ls
namesecondary
# tmp目录下只有dfs
[root@master tmp]# pwd
/home/software/hadoop-3.2.1/tmp
[root@master tmp]# ls
dfs
# 删除logs目录下的所有文件
[root@master logs]# pwd
/home/software/hadoop-3.2.1/logs
[root@master logs]# rm -rf ./*
# 格式化
hdfs namenode -format
# 启动
start-dfs.sh
# 查看
jps
标签:tmp,hadoop,dfs,start,sh,master,root
From: https://www.cnblogs.com/dogleftover/p/17877357.html