IDEA編譯器連結虛擬機器中Hadoop使用Maven

IDEA編譯器連結虛擬機器中HaDoop使用Maven

IDEA:2018.2.3
VMware:14
Hadoop:2.7.1
juit:4.12

  1. 新建一個Maven專案(不需要選擇型別)
    這裡寫圖片描述
  2. 填寫GroupId和 ArtifactId(這個根據大家習慣來寫,如果有好的建議歡迎在下面留言)
    這裡寫圖片描述
  3. 然後選擇檔案儲存路徑(個人建議路徑裡的資料夾名儘量不要有中文)
    這裡寫圖片描述
  4. 等待專案建立,可以在等待的間隙吧Maven倉庫的地址修改一下,畢竟Maven下載包還是挺佔地方的

這裡寫圖片描述
載入好之後我一般都會選擇自動引入包
這裡寫圖片描述
先找到pom.xml檔案
這裡寫圖片描述
然後新增dependency。注意:Hadoop的版本號要和虛擬機器裡的一樣

<dependencies>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.12</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.7.1</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.7.1</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.7.1</version>
    </dependency>
</dependencies>
  1. 在main/java下新建class
    這裡寫圖片描述
  2. 這裡使用Junit的測試方法,大家可以試一下
    其中的demo是我自己做的一個練習,因為還沒有學檔案複製所以操作看起來有點費事
 public class HdfsTest {
    // HDFS NameNode URI — change the IP to match your virtual machine.
    public static final String HDFS_PATH = "hdfs://192.168.171.128:9000";
    // Handle to the HDFS file system, initialized in setUp().
    FileSystem fileSystem = null;
    // Hadoop configuration object, initialized in setUp().
    Configuration configuration = null;

    /** Connects to HDFS before each test. */
    @Before
    public void setUp() throws Exception {
        configuration = new Configuration();
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration);
        System.out.println("HDFS APP SETUP");
    }

    /** Copies a local file up to HDFS, closing both streams even on failure. */
    private void copyToHdfs(String localFile, String hdfsPath) throws IOException {
        try (InputStream in = new FileInputStream(new File(localFile));
             OutputStream out = fileSystem.create(new Path(hdfsPath))) {
            IOUtils.copyBytes(in, out, configuration);
        }
    }

    /** Copies an HDFS file down to the local file system, closing both streams. */
    private void copyFromHdfs(String hdfsPath, String localFile) throws IOException {
        try (InputStream in = fileSystem.open(new Path(hdfsPath));
             OutputStream out = new FileOutputStream(new File(localFile))) {
            IOUtils.copyBytes(in, out, configuration);
        }
    }

    /** Creates /hdfsapi/test/a.txt on HDFS and writes a short string into it. */
    @Test
    public void create() throws IOException {
        // try-with-resources guarantees the stream is closed (and flushed) on every path.
        try (FSDataOutputStream outputStream = fileSystem.create(new Path("/hdfsapi/test/a.txt"))) {
            outputStream.write("hello hadoop".getBytes());
            outputStream.flush();
        }
    }

    /** Prints the contents of /hdfsapi/test/a.txt to stdout. */
    @Test
    public void cat() throws IOException {
        try (FSDataInputStream inputStream = fileSystem.open(new Path("/hdfsapi/test/a.txt"))) {
            IOUtils.copyBytes(inputStream, System.out, 1024);
        }
    }

    /** Creates the /hdfsapi/test directory on HDFS. */
    @Test
    public void mkdir() throws IOException {
        fileSystem.mkdirs(new Path("/hdfsapi/test"));
    }

    /** Uploads a local file to HDFS — remember to adjust the local path. */
    @Test
    public void upset() throws URISyntaxException, IOException {
        // Local source path — change to a file that exists on your machine.
        String file = "E:/hadoopTest/output/test.txt";
        // Fix: the original leaked both streams (never closed).
        copyToHdfs(file, "/hdfsapi/park/aaa.txt");
        // fileSystem.copyFromLocalFile() uses IOUtils.copyBytes() under the hood.
    }

    /** Downloads /park/2.txt from HDFS to a local file. */
    @Test
    public void download() throws URISyntaxException, IOException {
        // Local destination path — adjust as needed.
        String file = "E:/hadoopTest/output/test.txt";
        copyFromHdfs("/park/2.txt", file);
    }

    /** Exercise: a sequence of HDFS operations (mkdir, upload, rename, download, delete). */
    @Test
    public void demo1() throws URISyntaxException, IOException {
        // setUp() already initialized configuration/fileSystem; the original
        // re-initialized them here with a redundant (FileSystem) cast.
        // 1. Create directory "teacher" on HDFS.
        // 2. Upload score.txt into the teacher directory.
        copyToHdfs("E:/hadoopTest/score.txt", "/hdfs/teacher/score.txt");
        // 3. Create directory "student" with sub-directories Tom, LiMing, Jerry.
        fileSystem.mkdirs(new Path("/hdfs/student/Tom"));
        fileSystem.mkdirs(new Path("/hdfs/student/LiMing"));
        fileSystem.mkdirs(new Path("/hdfs/student/Jerry"));
        // 4. Upload information.txt into each of the three directories.
        String info = "E:/hadoopTest/information.txt";
        copyToHdfs(info, "/hdfs/student/Tom/information.txt");
        copyToHdfs(info, "/hdfs/student/LiMing/information.txt");
        copyToHdfs(info, "/hdfs/student/Jerry/information.txt");
        // 5. Rename "student" to "MyStudent".
        fileSystem.rename(new Path("/hdfs/student"), new Path("/hdfs/MyStudent"));
        // 6. Download Tom's information.txt into the E:/tom directory.
        //    Fix: FileOutputStream needs a file path, not the bare directory "E:/tom".
        copyFromHdfs("/hdfs/MyStudent/Tom/information.txt", "E:/tom/information.txt");
        // 7. Download teacher's score.txt into the same directory.
        copyFromHdfs("/hdfs/teacher/score.txt", "E:/tom/score.txt");
        // 8. Delete the Tom and LiMing directories.
        //    Fix: after step 5 they live under /hdfs/MyStudent, not /hdfs.
        fileSystem.delete(new Path("/hdfs/MyStudent/Tom"), true);
        fileSystem.delete(new Path("/hdfs/MyStudent/LiMing"), true);
    }

    /** Releases the HDFS connection after each test. */
    @After
    public void tearDown() throws Exception {
        fileSystem.close();
        configuration = null;
        System.out.println("HDFS APP SHUTDOWN");
    }
}