package cn.demo.controller;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.MultipartHttpServletRequest;
import org.springframework.web.multipart.commons.CommonsMultipartFile;
import org.springframework.web.multipart.commons.CommonsMultipartResolver;
import cn.demo.domain.FileBean;
import cn.demo.service.IFileService;
@Controller
@RequestMapping("/file")
public class FileController {
@Autowired
private IFileService fileService;
// Upload files using the Spring MVC multipart resolver
@RequestMapping("/uploadToHDFS2.action")
public String uploadToHDFS2(HttpServletRequest request,HttpServletResponse response) throws Exception{
// Create a generic multipart resolver
CommonsMultipartResolver multipartResolver = new CommonsMultipartResolver(request.getSession().getServletContext());
if(multipartResolver.isMultipart(request)){
// Cast to a multipart request
MultipartHttpServletRequest multiRequest = (MultipartHttpServletRequest)request;
// Get the names of all files in the request
Iterator<String> iter = multiRequest.getFileNames();
while(iter.hasNext()){
// Get the uploaded file
MultipartFile file = multiRequest.getFile(iter.next());
if(file != null){
// Get the original name of the uploaded file
String myFileName = file.getOriginalFilename();
// A non-empty name means a file was actually submitted
if(!myFileName.trim().isEmpty()){
System.out.println("======>"+myFileName);
// File name and target path in HDFS
String fileName = myFileName;
String filePath = "/1127/"+fileName;
//--------------------------------------------
// Upload to HDFS
Configuration configuration = new Configuration();
configuration.addResource("/core-site.xml");
configuration.addResource("/hdfs-site.xml");
FileSystem fs = FileSystem.get(new URI("hdfs://ns1"),configuration ,"root");
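// FileSystem.get(uri, conf, user): connect to the HDFS nameservice "ns1" (resolved from the hdfs-site.xml added above) as user "root"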
byte[] bytes = file.getBytes();
InputStream inputStream = new ByteArrayInputStream(bytes);
FSDataOutputStream outputStream = fs.create(new Path(filePath));
IOUtils.copyBytes(inputStream, outputStream, 4096, true);
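// copyBytes(..., 4096, true): copy with a 4 KB buffer and close both streams when the copy completes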
// Record the uploaded file (name and HDFS path)
fileService.addFile(fileName,filePath);
//--------------------------------------------
}
}
}
}
return "success";
}
@RequestMapping("/uploadToHDFS.action")
public String uploadToHDFS(@RequestParam("file") CommonsMultipartFile file,HttpServletRequest request,HttpServletResponse response) throws Exception{
// Get the uploaded file's input stream; do not cast to FileInputStream, since Commons FileUpload keeps small uploads in memory
InputStream inputStream = file.getInputStream();
String fileName = file.getOriginalFilename();
String filePath = "/1127/"+UUID.randomUUID()+fileName;
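// A random UUID prefix keeps repeated uploads of the same file name from overwriting each other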
System.out.println("****************>"+fileName);
// Upload to HDFS
Configuration configuration = new Configuration();
configuration.addResource("/core-site.xml");
configuration.addResource("/hdfs-site.xml");
FileSystem fs = FileSystem.get(new URI("hdfs://ns1"),configuration ,"root");
FSDataOutputStream outputStream = fs.create(new Path(filePath));
IOUtils.copyBytes(inputStream, outputStream, 4096, true);
return "success";
}
@RequestMapping("/listHDFS.action")
public String listHDFS(HttpServletRequest request,HttpServletResponse response,Model model) throws Exception{
Configuration configuration = new Configuration();
configuration.addResource("/core-site.xml");
configuration.addResource("/hdfs-site.xml");
FileSystem fs = FileSystem.get(new URI("hdfs://ns1"),configuration ,"root");
ArrayList<FileBean> fileList = new ArrayList<FileBean>();
Path path = new Path("/1127");
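// /1127 is the HDFS directory that both upload methods above write to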
FileStatus[] status;
try {
status = fs.listStatus(path);
for(FileStatus f : status){
fileList.add(new FileBean(f));
}
} catch (Exception e) {
e.printStackTrace();
}
model.addAttribute("fileList",fileList);
return "success";
}
@RequestMapping("/download.action")
public String download(String path,String fileName,HttpServletRequest request,HttpServletResponse response,Model model) throws Exception{
Configuration configuration = new Configuration();
configuration.addResource("/core-site.xml");
configuration.addResource("/hdfs-site.xml");
FileSystem fs = FileSystem.get(new URI("hdfs://ns1"),configuration ,"root");
// Re-decode the request parameter so UTF-8 (e.g. Chinese) path names survive the container's default ISO-8859-1 decoding
String tempPath = new String(path.getBytes("ISO-8859-1"),"UTF-8");
// The parameter is assumed to be a full HDFS URI such as hdfs://ns1/1127/<file>; keep only the path after the nameservice
String[] ps = tempPath.split("//ns1");
if(ps.length>1){
path = ps[1];
}else {
throw new Exception("unexpected HDFS path: "+tempPath);
}
System.out.println(path);
FSDataInputStream inputStream = fs.open(new Path(path));
response.setContentType("application/octet-stream");
response.setHeader("Content-disposition", "attachment; filename="+fileName);
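// Note: a non-ASCII fileName may need URL encoding here (e.g. URLEncoder.encode(fileName, "UTF-8")); plain concatenation is assumed to be enough for ASCII names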
// Write the file to the response output stream
BufferedOutputStream bos = new BufferedOutputStream(response.getOutputStream());
byte[] buff = new byte[2048];
int bytesRead;
while (-1 != (bytesRead = inputStream.read(buff, 0, buff.length))) {
bos.write(buff, 0, bytesRead);
}
bos.flush();
inputStream.close();
bos.close();
return "success";
}
}
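
For the @RequestParam("file") CommonsMultipartFile binding in uploadToHDFS to be populated, the DispatcherServlet must have a multipart resolver bean named multipartResolver registered. The project presumably declares one in its Spring XML configuration; the following is only a minimal sketch of the equivalent Java config (the package, class name, and 50 MB limit are illustrative, and Commons FileUpload must be on the classpath).

// MultipartConfig.java (illustrative; would live in its own file)
package cn.demo.config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.multipart.commons.CommonsMultipartResolver;

@Configuration
public class MultipartConfig {
    // DispatcherServlet looks up the resolver by the bean name "multipartResolver"
    @Bean
    public CommonsMultipartResolver multipartResolver() {
        CommonsMultipartResolver resolver = new CommonsMultipartResolver();
        resolver.setDefaultEncoding("UTF-8");          // decode form fields and file names as UTF-8
        resolver.setMaxUploadSize(50 * 1024 * 1024);   // 50 MB upload cap (illustrative)
        return resolver;
    }
}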