1. 程式人生 > java 讀取本地和hdfs資料夾

java 讀取本地和hdfs資料夾

package my.test;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Walks a local directory and an HDFS directory, printing every entry's
 * name (directories are tagged {@code -dir-} and recursed into; files are
 * printed with their length in bytes).
 *
 * @author zhouhh
 * @date 2013.3.14
 */
public class testhdfspaths {

    /**
     * Entry point. Expects two arguments — a local path and an HDFS path —
     * and falls back to hard-coded example defaults when fewer are given.
     *
     * @param args args[0] = local source path, args[1] = HDFS target path
     * @throws IOException if a FileSystem cannot be obtained or listing fails
     */
    public static void main(String[] args) throws IOException {
        final String sourceFile;
        final String targetFile;
        if (args.length < 2) {
            // Defaults preserved from the original example environment.
            sourceFile = "/home/zhouhh/test";
            targetFile = "hdfs://hadoop48:54310/user/zhouhh";
        } else {
            sourceFile = args[0];
            targetFile = args[1];
        }
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);
        FileSystem local = FileSystem.getLocal(conf);
        try {
            visitPath(local, sourceFile);
            visitPath(hdfs, targetFile);
        } catch (IOException e) {
            // The original printed the trace and swallowed the exception,
            // so the JVM exited 0 even on failure. Print for the log, then
            // rethrow so callers/shells see a non-zero exit status.
            e.printStackTrace();
            throw e;
        }
    }

    /**
     * Recursively prints the entries under {@code path} on the given
     * FileSystem (local or HDFS).
     *
     * @param fs   the FileSystem to inspect
     * @param path the directory to list
     * @throws IOException if {@code path} is not a valid, listable path
     */
    public static void visitPath(FileSystem fs, String path) throws IOException {
        Path inputDir = new Path(path);
        FileStatus[] inputFiles = fs.listStatus(inputDir);
        if (inputFiles == null) {
            throw new IOException(" the path is not correct:" + path);
        }
        System.out.println("----------------path:" + path + "----------------");
        for (FileStatus status : inputFiles) {
            // NOTE(review): isDir() is deprecated in modern Hadoop in favor of
            // isDirectory(); kept as-is for compatibility with the Hadoop
            // version this example targets — confirm before upgrading.
            if (status.isDir()) {
                System.out.println(status.getPath().getName() + " -dir-");
                visitPath(fs, status.getPath().toString());
            } else {
                System.out.println(status.getPath().getName() + ",len:" + status.getLen());
            }
        }
    }
}