hadoop hdfs (java api)

hadoop hdfs (java api)

標籤:HDFS、Java API、上傳下載

簡單介紹使用java控制hdfs文件系統

一、註意namenode端訪問權限,修改hdfs-site.xml文件或修改文件目錄權限

本次采用修改hdfs-site.xml用於測試,在configuration節點中添加如下內容

    <property>
        <name>dfs.permissions.enabled</name>
        <value>false</value>
    </property>

二、本次使用eclipse環境新建項目完成測試

使用手動添加jar包完成環境準備,jar包位於hadoop解壓目錄

如下:

hadoop-2.7.3\share\hadoop\common\hadoop-common-2.7.3.jar
hadoop-2.7.3\share\hadoop\common\lib\*.jar
hadoop-2.7.3\share\hadoop\hdfs\hadoop-hdfs-2.7.3.jar

添加完成jar包就可以編寫代碼,連接hdfs文件系統

連接hdfs需完成如下步驟

1.創建 org.apache.hadoop.conf.Configuration 用於指定客戶端的配置(服務器的地址,上傳下載文件的一些配置),本次采用如下方式配置

package com.huaqin.hdfs.conf;

import org.apache.hadoop.conf.Configuration;

/**
 * Client-side HDFS configuration with the NameNode address and replication
 * factor preset, so callers can pass it straight to {@code FileSystem.get(...)}.
 */
public class DeFaultDfsClientConfigration extends Configuration {

    public DeFaultDfsClientConfigration() {
        // NameNode RPC endpoint; replace the wildcard host with the real address.
        this.set("fs.defaultFS", "hdfs://*.*.*.*:9000");
        // Number of block replicas to request for files written by this client.
        this.set("dfs.replication", "2");
    }
}

2.編寫Utils封裝常見操作文件方法

需使用org.apache.hadoop.fs.FileSystem

通過上面的配置文件創建

FileSystem fileSystem = FileSystem.get(new DeFaultDfsClientConfigration());

創建完成之後便可以操作hdfs了,代碼封裝如下

package com.huaqin.hdfs.utils;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import com.huaqin.hdfs.conf.DeFaultDfsClientConfigration;

public class HDFSFileUtils {

    /** Progress fraction placeholder; not updated by any method in this class. */
    public double progressBar;

    // Shared HDFS handle; every operation below goes through it.
    private FileSystem fileSystem;

    /**
     * Connects to HDFS using the default client configuration.
     *
     * @throws IOException if the connection cannot be established
     */
    public HDFSFileUtils() throws IOException {
        fileSystem = FileSystem.get(new DeFaultDfsClientConfigration());
    }

    /**
     * Connects to HDFS using the supplied client configuration.
     *
     * @throws IOException if the connection cannot be established
     */
    public HDFSFileUtils(DeFaultDfsClientConfigration clientConfration) throws IOException {
        fileSystem = FileSystem.get(clientConfration);
    }

    /**
     * Swaps the configuration object on the existing handle.
     * NOTE(review): setConf does not re-connect to a different NameNode; to
     * switch clusters, construct a new HDFSFileUtils instead — confirm intent.
     */
    public void reloadClientConfigration(DeFaultDfsClientConfigration clientConfration) {
        fileSystem.setConf(clientConfration);
    }

    /**
     * Lists the status entries of the given HDFS path.
     *
     * @param fileName HDFS path (file or directory)
     * @return one FileStatus per entry
     */
    public FileStatus[] list(String fileName) throws FileNotFoundException, IllegalArgumentException, IOException {
        return this.fileSystem.listStatus(new Path(fileName));
    }

    /**
     * Prints the contents of an HDFS file to System.out.
     * Fix: the previous copyBytes(in, out, conf) overload closed System.out and
     * leaked the HDFS stream on error; the stream is now closed via
     * try-with-resources and System.out is left open.
     */
    public void text(String fileName) throws IllegalArgumentException, IOException {
        try (FSDataInputStream inputStream = this.fileSystem.open(new Path(fileName))) {
            IOUtils.copyBytes(inputStream, System.out, 4096, false);
        }
    }

    /**
     * Uploads a local file to HDFS, overwriting any existing destination.
     * Fix: both streams are now closed even if open/create throws (the old code
     * leaked the FileInputStream when create() failed).
     */
    public void upload(String src, String dest) throws IOException {
        try (FileInputStream in = new FileInputStream(src);
                FSDataOutputStream os = this.fileSystem.create(new Path(dest), true)) {
            IOUtils.copyBytes(in, os, 4096, false);
        }
    }

    /**
     * Recursively deletes the given HDFS path.
     *
     * @return true if the delete succeeded
     */
    public boolean deleteFile(String dest) throws IllegalArgumentException, IOException {
        return this.fileSystem.delete(new Path(dest), true);
    }

    /** Creates the directory (including any missing parents). */
    public boolean makeDir(String dest) throws IllegalArgumentException, IOException {
        return this.fileSystem.mkdirs(new Path(dest));
    }

    /**
     * Reads an HDFS file to completion while publishing progress into the map
     * ("byteSize" = estimated total, "current" = bytes read so far).
     * Fixes: "current" now advances by the actual bytes read per call (the old
     * code always added 1024, overshooting on the final partial read), and the
     * stream is closed even when read() throws.
     * NOTE(review): in.available() is only an estimate of the remaining bytes;
     * for an exact size use fileSystem.getFileStatus(path).getLen().
     */
    public void download2(String dest, Map<String, Integer> descript) throws IllegalArgumentException, IOException {
        try (FSDataInputStream in = fileSystem.open(new Path(dest))) {
            descript.put("byteSize", in.available());
            descript.put("current", 0);
            byte[] bs = new byte[1024];
            int bytesRead;
            while (-1 != (bytesRead = in.read(bs))) {
                descript.put("current", descript.get("current") + bytesRead);
            }
        }
    }

    /**
     * Uploads a local file while publishing progress into the map
     * ("byteSize" = file length, "current" = bytes written so far).
     * Fixes: only the bytes actually read are written — the old out.write(bs)
     * wrote the whole 512 KB buffer on every iteration, corrupting the uploaded
     * file on any partial read; progress advances by the real byte count
     * (previously +1024 per 512 KB buffer); both streams are closed on error.
     */
    public void upload2(String src, String dest, Map<String, Long> descript)
            throws IllegalArgumentException, IOException {
        File file = new File(src);
        try (FileInputStream in = new FileInputStream(file);
                FSDataOutputStream out = this.fileSystem.create(new Path(dest), true)) {
            descript.put("byteSize", file.length());
            descript.put("current", 0l);
            // 0.5 MB transfer buffer
            byte[] bs = new byte[1024 * 1024 / 2];
            int bytesRead;
            while (-1 != (bytesRead = in.read(bs))) {
                out.write(bs, 0, bytesRead);
                descript.put("current", descript.get("current") + bytesRead);
            }
        }
    }

}

三、以下是JUnitTest測試環境

import java.io.IOException;
import java.text.DecimalFormat;
import java.util.HashMap;
import java.util.Map;

import org.junit.Before;
import org.junit.Test;

import com.huaqin.hdfs.utils.HDFSFileUtils;

public class HDFSFileUtilsJUT {

    HDFSFileUtils fileUtils;

    @Before
    public void before() throws IOException {
        // Fresh utility (and HDFS connection) before every test.
        fileUtils = new HDFSFileUtils();
    }

    /** Uploads a large local archive to the HDFS root. */
    @Test
    public void testCreateNEWFile() throws IOException {
        fileUtils.upload("E:\\devtool\\hadoop-2.7.3.tar.gz", "/hadoop-2.7.3.tar.gz");
    }

    /** Dumps a small HDFS text file to stdout. */
    @Test
    public void testText() throws IllegalArgumentException, IOException {
        fileUtils.text("/hello.txt");
    }

    /** Deletes a previously uploaded file and prints the result. */
    @Test
    public void testDeleteFile() throws IllegalArgumentException, IOException {
        boolean success = fileUtils.deleteFile("/CentOS-7-x86_64-DVD-1511.iso");
        System.out.println(success);
    }

    /** Creates a directory and prints the result. */
    @Test
    public void testZMikdirs() throws IllegalArgumentException, IOException {
        boolean success = fileUtils.makeDir("/tmp");
        System.out.println(success);
    }

    /**
     * Downloads a file while a background thread prints progress every 500 ms.
     * Fixes: the monitor is now a daemon thread and is interrupted when the
     * transfer finishes — the old non-daemon infinite loop kept the JVM alive
     * after the test — and InterruptedException re-asserts the interrupt flag
     * instead of being swallowed.
     */
    @Test
    public void testdownload2() throws IllegalArgumentException, IOException {
        Map<String, Integer> desc = new HashMap<>();
        desc.put("current", 0);
        desc.put("byteSize", 0);
        Thread monitor = new Thread(() -> {
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    Thread.sleep(500);
                    System.out.printf("maxL:%d\tcurrent:%d\tsurplus:%d\n",
                            desc.get("byteSize"), desc.get("current"),
                            desc.get("byteSize") - desc.get("current"));
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // restore interrupt status and exit
                }
            }
        });
        monitor.setDaemon(true); // never block JVM shutdown
        monitor.start();
        fileUtils.download2("/hadoop-2.7.3.tar.gz", desc);
        monitor.interrupt();
    }

    /**
     * Uploads a file while a background thread prints progress with a percentage.
     * Fixes: same daemon/interrupt handling as testdownload2, plus a guard
     * against dividing by zero — "byteSize" is 0 until upload2 records the file
     * length, which previously produced a NaN percentage.
     */
    @Test
    public void testupload2() throws IllegalArgumentException, IOException {
        DecimalFormat df = new DecimalFormat("0.00%");

        Map<String, Long> desc = new HashMap<String, Long>();
        desc.put("current", 0l);
        desc.put("byteSize", 0l);
        Thread monitor = new Thread(() -> {
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    Thread.sleep(500);
                    long total = desc.get("byteSize");
                    long current = desc.get("current");
                    String pct = total == 0 ? "0.00%" : df.format((current + 0.0) / total);
                    System.out.printf("maxL:%d\tcurrent:%d\tsurplus:%d\tprogressBar:%s\n",
                            total, current, total - current, pct);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // restore interrupt status and exit
                }
            }
        });
        monitor.setDaemon(true); // never block JVM shutdown
        monitor.start();
        fileUtils.upload2("D:\\hadoop\\CentOS-7-x86_64-DVD-1511.iso", "/CentOS-7-x86_64-DVD-1511.iso", desc);
        monitor.interrupt();
    }

}

hadoop hdfs (java api)