HDFS java介面——實現目錄增刪檔案讀寫
阿新 • • 發佈:2019-01-12
1、在Idea中,建立maven專案,選擇org.apache.maven.archetype:maven-archetype-quickstart,建立一個普通的Java專案(archetypeCatalog:internal),預設會建立 App 類。
2、新增依賴
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client --> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-client</artifactId> <version>2.6.0</version> </dependency>
3、新增log4j.properties
#全域性配置 log4j.rootLogger= ERROR,stdout #Hadoop 日誌配置 log4j.logger.org.apache.hadoop= DEBUG #控制檯輸出配置 log4j.appender.stdout= org.apache.log4j.ConsoleAppender log4j.appender.stdout.layout= org.apache.log4j.PatternLayout log4j.appender.stdout.layout.ConversionPattern= %5p [%t] - %m%n
4、檔案和目錄相關操作
package Hadoop; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; import java.io.IOException; /** * Hello world! * */ public class App { private static Configuration configuration = new Configuration(); static { configuration.set("fs.defaultFS","hdfs://192.168.255.128:9000"); System.setProperty("HADOOP_USER_NAME","root"); System.setProperty("hadoop.home.dir","E:/hadoop2.6"); } public static void main( String[] args ) throws IOException { //createDir(); //list(); //uploadFile(); //readContent(); deleteDir(); } /** * 建立資料夾 * @throws IOException */ private static void createDir() throws IOException { FileSystem fs = FileSystem.get(configuration); fs.mkdirs(new Path("/hdfs2")); fs.close(); System.out.println("建立成功"); } /** * 上傳檔案(將檔案拷貝到HDFS上) * @throws IOException */ private static void uploadFile() throws IOException { FileSystem fs = FileSystem.get(configuration); Path srcPath = new Path("E:/1.txt"); Path targetPath = new Path("/hdfs/"); fs.copyFromLocalFile(srcPath,targetPath); fs.close(); System.out.println("上傳成功"); } /** * 展示hdfs檔案或目錄 * @throws IOException */ private static void list() throws IOException { FileSystem fs = FileSystem.get(configuration); FileStatus[] fileStatuses = fs.listStatus(new Path("/")); for(FileStatus fileStatus:fileStatuses){ String type = fileStatus.isDirectory() ? 
"目錄":"檔案"; String name = fileStatus.getPath().getName(); System.out.println(type + "---" + name); } fs.close(); } /** * 讀取檔案內容 * @throws IOException */ private static void readContent() throws IOException { FileSystem fs = FileSystem.get(configuration); FSDataInputStream stream = fs.open(new Path("/input/input2.txt")); IOUtils.copyBytes(stream,System.out,1024,true); IOUtils.closeStream(stream); fs.close(); } /** * 刪除目錄 * @throws IOException */ private static void deleteDir() throws IOException { FileSystem fs = FileSystem.get(configuration); fs.delete(new Path("/hdfs2"),true); fs.close(); System.out.println("刪除成功"); } }
E:\hadoop2.6\bin下檔案如下