
Implementing CRUD Operations on the Hadoop File System with the Java API



The Hadoop file system can be operated with shell commands of the form hadoop fs -xx, and it also provides a Java programming API.

Maven configuration

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>hadoopStudy</groupId>
  <artifactId>demo</artifactId>
  <version>1.0-SNAPSHOT</version>
  <packaging>jar</packaging>

  <name>demo</name>
  <url>http://maven.apache.org</url>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
  </properties>

  <dependencies>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>3.8.1</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>3.0.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>3.0.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>3.0.0</version>
    </dependency>
  </dependencies>
</project>

Code implementation

package hadoopStudy;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class HdfsUtil {
    private FileSystem fs;
    HdfsUtil(String user){
        Configuration cfg = new Configuration();
        URI uri = null;
        try {
            uri = new URI("hdfs://localhost:9000");
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
        try {
            // Based on the configuration, this instantiates a DistributedFileSystem
            fs = FileSystem.get(uri, cfg, user); // obtain the FileSystem handle
        } catch (IOException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * Upload a local file to HDFS
     */
    public void upload(String src, String dst){
        try {
            // Copy the local file to HDFS
            fs.copyFromLocalFile(new Path(src), new Path(dst));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Download a file from HDFS
     */
    public void download(String src, String dst){
        try {
            // Copy the HDFS file to the local file system
            fs.copyToLocalFile(new Path(src), new Path(dst));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Create a directory
     */
    public void mkdir(String dir){
        try {
            // Create the directory (including any missing parent directories)
            fs.mkdirs(new Path(dir));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Delete a file or directory
     */
    public void delete(String name, boolean recursive){
        try {
            fs.delete(new Path(name), recursive);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Rename a file or directory
     */
    public void rename(String source, String dst){
        try {
            fs.rename(new Path(source), new Path(dst));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * List file information under a directory
     */
    public void list(String dir, boolean recursive){
        try {
            RemoteIterator<LocatedFileStatus> iter = fs.listFiles(new Path(dir), recursive);
            while (iter.hasNext()){
                LocatedFileStatus file = iter.next();
                System.out.println(file.getPath().getName());
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
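
A minimal driver showing how HdfsUtil might be used is sketched below; the user name and all paths are placeholders and must be adjusted to your environment.

package hadoopStudy;

public class HdfsUtilDemo {
    public static void main(String[] args) {
        // "hadoop" is an assumed HDFS user with write permission on /demo
        HdfsUtil hdfs = new HdfsUtil("hadoop");

        hdfs.mkdir("/demo");                           // create a directory
        hdfs.upload("/tmp/local.txt", "/demo/a.txt");  // local -> HDFS
        hdfs.rename("/demo/a.txt", "/demo/b.txt");     // rename inside HDFS
        hdfs.list("/demo", true);                      // print file names recursively
        hdfs.download("/demo/b.txt", "/tmp/copy.txt"); // HDFS -> local
        hdfs.delete("/demo", true);                    // recursive delete
    }
}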

Summary

The HDFS Java API provides an elegant FileSystem abstract class; from the client's point of view, using it is not much different from working with any other file system.
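
To illustrate that uniformity, the same FileSystem calls also work against the local file system simply by changing the URI scheme; the following is a minimal sketch with placeholder paths.

package hadoopStudy;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

public class LocalFsDemo {
    public static void main(String[] args) throws Exception {
        // file:/// resolves to LocalFileSystem instead of DistributedFileSystem,
        // but the client-side calls are identical
        FileSystem localFs = FileSystem.get(new URI("file:///"), new Configuration());
        Path dir = new Path("/tmp/fs-demo");     // placeholder local path
        localFs.mkdirs(dir);
        System.out.println(localFs.exists(dir)); // prints true if the directory was created
        localFs.delete(dir, true);
        localFs.close();
    }
}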
