(8)基於hadoop的簡單網盤應用實現4
阿新 • • 發佈:2018-12-31
檔案結構
(1)、index.jsp首頁面實現
index.jsp
<%@ include file="head.jsp"%>
<%@ page language="java" contentType="text/html; charset=UTF-8" pageEncoding="UTF-8"%>
<%@ page import="org.apache.hadoop.fs.FileStatus"%>
<%-- Home page: renders the FileStatus[] placed in request attribute "list" by the
     servlets, with an upload form and per-entry delete/download links. --%>
<body style="text-align:center;margin-bottom:100px;">
<div class="navbar" >
  <div class="navbar-inner">
    <a class="brand" href="#" style="margin-left:200px;">網盤</a>
    <ul class="nav">
      <li><a href="LogoutServlet">退出</a></li>
    </ul>
  </div>
</div>
<div style="margin:0px auto; text-align:left;width:1200px; height:50px;">
  <form class="form-inline" method="POST" enctype="MULTIPART/FORM-DATA" action="UploadServlet" >
    <div style="line-height:50px;float:left;">
      <input type="submit" name="submit" value="上傳檔案" />
    </div>
    <div style="line-height:50px;float:left;">
      <input type="file" name="file1" size="30"/>
    </div>
  </form>
</div>
<div style="margin:0px auto; width:1200px;height:500px; background:#fff">
  <table class="table table-hover" style="width:1000px;margin-left:100px;">
    <tr style=" border-bottom:2px solid #ddd">
      <td >檔名</td><td style="width:100px">型別</td><td style="width:100px;">大小</td><td style="width:100px;">操作</td><td style="width:100px;">操作</td>
    </tr>
    <%
      FileStatus[] list = (FileStatus[])request.getAttribute("list");
      if(list != null)
        for (int i=0; i<list.length; i++) {
    %>
    <tr style="border-bottom:1px solid #eee">
      <%
        if(list[i].isDir()) {
          // FIX: the directory link is now URL-encoded like the delete/download
          // links below; the raw path broke for names with spaces / non-ASCII.
          out.print("<td> <a href=\"DocumentServlet?filePath="+java.net.URLEncoder.encode(list[i].getPath().toString(),"GB2312")+"\">"+list[i].getPath().getName()+"</a></td>");
        }else{
          out.print("<td>"+list[i].getPath().getName()+"</td>");
        }
      %>
      <td><%= (list[i].isDir()?"目錄":"檔案") %></td>
      <td><%= list[i].getLen()/1024%></td>
      <td><a href="DeleteFileServlet?filePath=<%=java.net.URLEncoder.encode(list[i].getPath().toString(),"GB2312") %>">x</a></td>
      <td><a href="DownloadServlet?filePath=<%=java.net.URLEncoder.encode(list[i].getPath().toString(),"GB2312") %>">下載</a></td>
    </tr >
    <%
        }
    %>
  </table>
</div>
</body>
</html>
(2)document.jsp檔案
<%@ include file="head.jsp"%>
<%@ page language="java" contentType="text/html; charset=UTF-8" pageEncoding="UTF-8"%>
<%@ page import="org.apache.hadoop.fs.FileStatus"%>
<%-- Directory listing page: renders the FileStatus[] placed in request attribute
     "documentList" by DocumentServlet. --%>
<body style="text-align:center;margin-bottom:100px;">
<div class="navbar" >
  <div class="navbar-inner">
    <a class="brand" href="#" style="margin-left:200px;">網盤</a>
    <ul class="nav">
      <li class="active"><a href="#">首頁</a></li>
      <li><a href="#">Link</a></li>
      <li><a href="#">Link</a></li>
    </ul>
  </div>
</div>
<div style="margin:0px auto; text-align:left;width:1200px; height:50px;">
  <form class="form-inline" method="POST" enctype="MULTIPART/FORM-DATA" action="UploadServlet" >
    <div style="line-height:50px;float:left;">
      <input type="submit" name="submit" value="上傳檔案" />
    </div>
    <div style="line-height:50px;float:left;">
      <input type="file" name="file1" size="30"/>
    </div>
  </form>
</div>
<div style="margin:0px auto; width:1200px;height:500px; background:#fff">
  <table class="table table-hover" style="width:1000px;margin-left:100px;">
    <tr><td>檔名</td><td>屬性</td><td>大小(KB)</td><td>操作</td><td>操作</td></tr>
    <%
      FileStatus[] list = (FileStatus[])request.getAttribute("documentList");
      if(list != null)
        for (int i=0; i<list.length; i++) {
    %>
    <tr style=" border-bottom:2px solid #ddd">
      <%
        if(list[i].isDir()) {
          // FIX: URL-encode the nested-directory link, consistent with the
          // delete/download links; a raw path broke for non-ASCII names.
          out.print("<td><a href=\"DocumentServlet?filePath="+java.net.URLEncoder.encode(list[i].getPath().toString(),"GB2312")+"\">"+list[i].getPath().getName()+"</a></td>");
        }else{
          out.print("<td>"+list[i].getPath().getName()+"</td>");
        }
      %>
      <td><%= (list[i].isDir()?"目錄":"檔案") %></td>
      <td><%= list[i].getLen()/1024%></td>
      <td><a href="DeleteFileServlet?filePath=<%=java.net.URLEncoder.encode(list[i].getPath().toString(),"GB2312") %>">x</a></td>
      <td><a href="DownloadServlet?filePath=<%=java.net.URLEncoder.encode(list[i].getPath().toString(),"GB2312") %>">下載</a></td>
    </tr>
    <%
        }
    %>
  </table>
</div>
</body>
</html>
(3)DeleteFileServlet 檔案
package com.controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;

import com.model.HdfsDAO;
// FIX: removed the unused "import com.sun.security.ntlm.Server;" — it is a
// JDK-internal class, was never referenced, and breaks builds on modern JDKs.

/**
 * Deletes a file or directory from HDFS and re-renders the listing page.
 */
public class DeleteFileServlet extends HttpServlet {

    private static final long serialVersionUID = 1L;

    /**
     * Deletes the HDFS path given by the {@code filePath} request parameter,
     * then forwards the refreshed listing to {@code index.jsp}.
     *
     * @see HttpServlet#doGet(HttpServletRequest, HttpServletResponse)
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        // Links in the JSPs encode the path as GB2312; the container decodes
        // query strings as ISO-8859-1, so re-decode the raw bytes here.
        String filePath = new String(request.getParameter("filePath").getBytes("ISO-8859-1"), "GB2312");
        JobConf conf = HdfsDAO.config();
        HdfsDAO hdfs = new HdfsDAO(conf);
        hdfs.rmr(filePath); // recursive delete
        System.out.println("====" + filePath + "====");
        // NOTE(review): listing is hard-coded to /user/root/ while UploadServlet
        // lists "/" + session username — confirm which root the app intends.
        FileStatus[] list = hdfs.ls("/user/root/");
        request.setAttribute("list", list);
        request.getRequestDispatcher("index.jsp").forward(request, response);
    }

    /**
     * @see HttpServlet#doPost(HttpServletRequest, HttpServletResponse)
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        this.doGet(request, response);
    }
}
(4)UploadServlet檔案
package com.controller;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.jsp.PageContext;
import org.apache.commons.fileupload.DiskFileUpload;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;
import com.model.HdfsDAO;
/**
* Servlet implementation class UploadServlet
*/
public class UploadServlet extends HttpServlet {

    private static final long serialVersionUID = 1L;

    /**
     * @see HttpServlet#doGet(HttpServletRequest, HttpServletResponse)
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        this.doPost(request, response);
    }

    /**
     * Accepts a multipart upload, stores each file under the webapp's
     * "file-upload" directory, copies it into the current user's HDFS
     * directory, then forwards the refreshed listing to index.jsp.
     *
     * @see HttpServlet#doPost(HttpServletRequest, HttpServletResponse)
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        request.setCharacterEncoding("UTF-8");
        int maxFileSize = 50 * 1024 * 1024; // max size of a single upload: 50 MB
        int maxMemSize = 50 * 1024 * 1024;  // keep items up to 50 MB in memory

        ServletContext context = getServletContext();
        // Staging directory on the Tomcat server, configured in web.xml.
        String filePath = context.getInitParameter("file-upload");
        System.out.println("source file path:" + filePath + "");

        // Verify the request is a file upload. FIX: guard against a null
        // Content-Type, which previously threw a NullPointerException.
        String contentType = request.getContentType();
        if (contentType != null && contentType.indexOf("multipart/form-data") >= 0) {
            DiskFileItemFactory factory = new DiskFileItemFactory();
            // Items above this threshold are spilled to disk.
            factory.setSizeThreshold(maxMemSize);
            // FIX: spill to the JVM temp dir instead of a hard-coded "c:\\temp",
            // which only existed on the author's machine.
            factory.setRepository(new File(System.getProperty("java.io.tmpdir")));
            ServletFileUpload upload = new ServletFileUpload(factory);
            upload.setSizeMax(maxFileSize);
            try {
                // Parse the multipart body. (Raw List in commons-fileupload 1.2;
                // the cast is safe per its documented contract.)
                @SuppressWarnings("unchecked")
                List<FileItem> fileItems = upload.parseRequest(request);
                System.out.println("begin to upload file to tomcat server</p>");

                String username = (String) request.getSession().getAttribute("username");
                JobConf conf = HdfsDAO.config();
                HdfsDAO hdfs = new HdfsDAO(conf);

                for (FileItem fi : fileItems) {
                    if (fi.isFormField()) {
                        continue; // skip regular form fields (e.g. the submit button)
                    }
                    // IE sends the full client path; keep only the base name.
                    String fileName = fi.getName();
                    String fn = fileName.substring(fileName.lastIndexOf("\\") + 1);
                    System.out.println("<br>" + fn + "<br>");

                    // Stage the file on the Tomcat server under its base name.
                    File file = new File(filePath, fn);
                    fi.write(file);
                    System.out.println("upload file to tomcat server success!");

                    // Copy the staged file into the user's HDFS directory.
                    // FIX: build the source path with File (platform separator)
                    // instead of a hard-coded "\\", which broke on Linux.
                    System.out.println("begin to upload file to hadoop hdfs</p>");
                    hdfs.copyFile(file.getPath(), "/" + username + "/" + fn);
                    System.out.println("upload file to hadoop hdfs success!");
                    System.out.println("username-----" + username);
                }

                // FIX: forward ONCE, after the loop. The original forwarded inside
                // the loop, causing IllegalStateException on multi-file uploads.
                FileStatus[] list = hdfs.ls("/" + username);
                request.setAttribute("list", list);
                request.getRequestDispatcher("index.jsp").forward(request, response);
            } catch (Exception ex) {
                // FIX: surface the failure to the container instead of silently
                // printing it and leaving the request without a response.
                throw new ServletException("file upload failed", ex);
            }
        } else {
            System.out.println("<p>No file uploaded</p>");
        }
    }
}
(5)DownloadServlet檔案
package com.controller;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;
import com.model.HdfsDAO;
/**
* Servlet implementation class DownloadServlet
*/
/**
 * "Downloads" a file by copying it from HDFS onto the server's local disk,
 * then re-renders the listing page.
 *
 * NOTE(review): this copies to the SERVER's filesystem, not to the browser.
 * A real download would stream the file bytes through the servlet response
 * (Content-Disposition: attachment). Flagged for redesign.
 */
public class DownloadServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
/**
* Copies the HDFS path named by the {@code filePath} parameter to a local
* directory, then forwards the refreshed listing to index.jsp.
*
* @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
*/
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// NOTE(review): hard-coded server-side destination; Windows-only.
String local = "C:/";
// Links encode the path as GB2312; the container decodes query strings as
// ISO-8859-1, so re-decode the raw bytes here.
String filePath = new String(request.getParameter("filePath").getBytes("ISO-8859-1"),"GB2312");
System.out.println(filePath);
JobConf conf = HdfsDAO.config();
HdfsDAO hdfs = new HdfsDAO(conf);
// Copy HDFS file -> server-local C:/ (not sent to the client).
hdfs.download(filePath, local);
// NOTE(review): hard-coded /user/root/ while UploadServlet lists
// "/" + session username — confirm which root the app intends.
FileStatus[] list = hdfs.ls("/user/root/");
request.setAttribute("list",list);
request.getRequestDispatcher("index.jsp").forward(request,response);
}
/**
* @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
*/
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
this.doGet(request, response);
}
}
(6)DocumentServlet檔案
package com.controller;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;
import com.model.HdfsDAO;
/**
* Servlet implementation class DocumentServlet
*/
/**
 * Lists the contents of an HDFS directory and forwards the result to
 * document.jsp as request attribute "documentList".
 */
public class DocumentServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
/**
* @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
*/
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// Directory links encode the path as GB2312 while the container decodes
// query strings as ISO-8859-1, so re-decode the raw parameter bytes.
byte[] rawPath = request.getParameter("filePath").getBytes("ISO-8859-1");
String targetDir = new String(rawPath, "GB2312");
HdfsDAO dao = new HdfsDAO(HdfsDAO.config());
FileStatus[] documentList = dao.ls(targetDir);
request.setAttribute("documentList", documentList);
request.getRequestDispatcher("document.jsp").forward(request, response);
}
/**
* @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
*/
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doGet(request, response);
}
}
(7)LogoutServlet檔案
package com.controller;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
/**
* Servlet implementation class LogoutServlet
*/
/**
 * Logs the current user out and returns them to the login page.
 */
public class LogoutServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
/**
* Ends the current session and forwards to login.jsp.
*
* FIX: invalidate the whole session instead of only removing the
* "username" attribute — the old code left the session (and any other
* state in it) alive after "logout".
*
* @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
*/
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// getSession(false): don't create a fresh session just to destroy it.
HttpSession session = request.getSession(false);
if (session != null) {
session.invalidate();
}
request.getRequestDispatcher("login.jsp").forward(request, response);
}
/**
* @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
*/
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
this.doGet(request, response);
}
}
到此,一個簡單的基於hadoop的網盤應用就完成了,如果想把它做的更像一個真正的網盤,大家可以花多點時間去實現剩下的功能。
原始碼下載地址:http://download.csdn.net/detail/wen294299195/7779949