JAVA API 上傳下載檔案到HDFS
阿新 • • 發佈:2018-12-13
package com.kfk.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.FileInputStream;

/**
 * Minimal demo of uploading a local file to HDFS and streaming an HDFS file
 * to stdout, using the Hadoop {@link FileSystem} API.
 */
public class HdfsApp {

    /**
     * Obtains a FileSystem from the default Configuration (cluster settings are
     * read from core-site.xml / hdfs-site.xml on the classpath).
     *
     * NOTE(review): {@code FileSystem.get} returns a JVM-cached, shared instance
     * per scheme/authority, which is why it is not closed by the callers below —
     * closing it would invalidate the cache entry for other users.
     *
     * @return a FileSystem bound to the default configuration
     * @throws Exception if the file system cannot be created
     */
    private FileSystem getFileSystem() throws Exception {
        Configuration configuration = new Configuration();
        return FileSystem.get(configuration);
    }

    /**
     * Streams the contents of an HDFS file to {@code System.out}.
     *
     * @param filePath HDFS path of the file to read
     */
    private void readHdfsFile(String filePath) {
        FSDataInputStream in = null;
        try {
            in = this.getFileSystem().open(new Path(filePath));
            // close=false: the stream is closed explicitly in finally below.
            IOUtils.copyBytes(in, System.out, 4096, false);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Hadoop's closeStream is documented null-safe; no guard needed.
            IOUtils.closeStream(in);
        }
    }

    /**
     * Uploads a local file to HDFS.
     *
     * @param inPath  local filesystem path of the source file
     * @param outPath HDFS path of the destination file (created/overwritten)
     */
    private void writeHdfsFile(String inPath, String outPath) {
        FSDataOutputStream out = null;
        FileInputStream in = null;
        try {
            out = this.getFileSystem().create(new Path(outPath));
            in = new FileInputStream(inPath);
            // close=false: both streams are closed explicitly in finally below.
            IOUtils.copyBytes(in, out, 4096, false);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Close the source first, then flush/close the HDFS output stream.
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
        }
    }

    /**
     * Entry point. Uploads a local file to HDFS.
     *
     * Usage: HdfsApp [localInputPath [hdfsOutputPath]]
     * Defaults (preserved from the original demo) are used when arguments are
     * omitted, so existing no-arg invocations behave exactly as before.
     */
    public static void main(String[] args) {
        HdfsApp hdfsApp = new HdfsApp();

        String inPath = args.length > 0 ? args[0]
                : "D://workSpace/src/main/resources/hdfs-site.xml";
        String outPath = args.length > 1 ? args[1]
                : "hdfs://ns/user/kfk/data/local.xml";

        hdfsApp.writeHdfsFile(inPath, outPath);
    }
}