Making up an HDFS classroom test from some day — exactly which day, I no longer remember.
Take a look at the questions.
Going straight to the code: it creates a file on HDFS, appends user input to it, and then "saves it as" another HDFS path by way of a local temp file.
package com.rsh.hdfs.test2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.util.Scanner;

public class HDFSTest {

    public static FileSystem fs = null;

    public static void main(String[] args) throws IOException {
        Configuration configuration = new Configuration();
        // Act as the root user and point the client at the NameNode.
        System.setProperty("HADOOP_USER_NAME", "root");
        configuration.set("fs.defaultFS", "hdfs://node1:8020");
        fs = FileSystem.get(configuration);

        Scanner sc = new Scanner(System.in);
        System.out.print("Enter the path of the file to create: ");
        String path = sc.next();

        create(path);
        update(path);
        save(path);

        // Close the shared FileSystem handle exactly once, here.
        fs.close();
    }

    // Create the file if it does not exist, write a marker string, then read it back.
    public static void create(String path) throws IOException {
        if (!fs.exists(new Path(path))) {
            FSDataOutputStream outputStream = fs.create(new Path(path));
            outputStream.writeUTF("File created successfully");
            outputStream.flush();
            outputStream.close();

            FSDataInputStream open = fs.open(new Path(path));
            System.out.println(open.readUTF());
            open.close();
        }
    }

    // Append user input to the file, then print its contents.
    public static void update(String path) throws IOException {
        Scanner sc = new Scanner(System.in);
        FSDataOutputStream append = fs.append(new Path(path));
        System.out.print("Enter the content to append: ");
        String s = sc.next();
        append.writeUTF(s);
        append.flush();
        append.close();

        // Each writeUTF call produces one record, so loop until the stream is exhausted
        // instead of reading only the first record.
        FSDataInputStream open = fs.open(new Path(path));
        while (open.available() > 0) {
            System.out.println(open.readUTF());
        }
        open.close();
    }

    // "Save as": download the HDFS file to a local temp file, then upload it to the new HDFS path.
    public static void save(String path) throws IOException {
        Scanner sc = new Scanner(System.in);
        Path src = new Path(path);
        Path mid = new Path("D:\\test.txt");
        System.out.print("Enter the path to save the file as: ");
        String savePath = sc.next();
        Path dst = new Path(savePath);

        fs.copyToLocalFile(src, mid);
        fs.copyFromLocalFile(mid, dst);
        // Do not close fs here; main() closes it after all three steps.
    }
}
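If you want to eyeball the saved copy afterwards, here is a minimal sketch of a small reader, assuming the same node1:8020 cluster and root user as above; the class name HDFSCatCheck and the use of a command-line argument for the path are my own additions, not part of the test.

package com.rsh.hdfs.test2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class HDFSCatCheck {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Same assumed cluster settings as the test program above.
        System.setProperty("HADOOP_USER_NAME", "root");
        conf.set("fs.defaultFS", "hdfs://node1:8020");
        try (FileSystem fs = FileSystem.get(conf);
             FSDataInputStream in = fs.open(new Path(args[0]))) {
            // Dump the raw bytes of the saved file to stdout
            // (writeUTF records carry a short length prefix, which will show up in the output).
            IOUtils.copyBytes(in, System.out, 4096, false);
        }
    }
}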