package com.ud.uploadfiledemo.controller;
import com.ud.uploadfiledemo.model.ChunkInfo;
import com.ud.uploadfiledemo.model.MergeStatus;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.scheduling.TaskScheduler;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import java.time.Instant;
import java.io.*;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
@RestController
@RequestMapping("/upload")
@Slf4j
public class FileUploadController {

    /** Root directory for chunk storage and merged output (from application properties). */
    @Value("${file.upload.path}")
    private String uploadPath;

    // File MD5s whose chunks are currently being merged (claim token for at-most-once merge).
    private final Set<String> mergingFiles = ConcurrentHashMap.newKeySet();

    // Merge progress per fileMd5, polled by the client via GET /upload/merge/status.
    private final ConcurrentHashMap<String, MergeStatus> mergeStatusMap = new ConcurrentHashMap<>();

    // Single-threaded scheduler used only to expire merge-status entries after completion.
    private final TaskScheduler taskScheduler;

    public FileUploadController() {
        ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
        scheduler.setPoolSize(1);
        scheduler.initialize();
        this.taskScheduler = scheduler;
    }

    /**
     * Receives one chunk of a file. Chunks are stored under a directory named after the
     * file's MD5, keyed by their 1-based chunk number. When the final chunk arrives,
     * the chunks are merged into the target file (at most once per fileMd5).
     *
     * @param file      the chunk payload
     * @param chunkInfo metadata: fileMd5, chunkNumber, totalChunks, filename
     * @return 200 on success, 500 with a message on I/O failure
     */
    @PostMapping("/chunk")
    public ResponseEntity<String> uploadChunk(@RequestParam("file") MultipartFile file,
                                              ChunkInfo chunkInfo) {
        try {
            // Each file's chunks live in a directory named after its MD5.
            String chunkDirPath = uploadPath + File.separator + chunkInfo.getFileMd5();
            File chunkDir = new File(chunkDirPath);
            // FIX: mkdirs() result was ignored; a failed create now fails fast
            // instead of surfacing later as a confusing transferTo error.
            if (!chunkDir.exists() && !chunkDir.mkdirs()) {
                throw new IOException("Cannot create chunk directory: " + chunkDirPath);
            }
            // Persist the chunk under its chunk number as the file name.
            File chunkFile = new File(chunkDirPath + File.separator + chunkInfo.getChunkNumber());
            file.transferTo(chunkFile);

            // FIX: Set.add() is the atomic check-and-claim. The original
            // contains()-then-add() pair was a TOCTOU race: two requests carrying
            // the last chunk could both pass contains() and start duplicate merges.
            if (checkIfAllChunksUploaded(chunkInfo) && mergingFiles.add(chunkInfo.getFileMd5())) {
                try {
                    mergeChunks(chunkInfo);
                } finally {
                    // Release the claim whether the merge succeeded or failed.
                    mergingFiles.remove(chunkInfo.getFileMd5());
                }
            }
            return ResponseEntity.ok("Chunk uploaded successfully");
        } catch (IOException e) {
            log.error("Upload chunk failed", e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("Upload failed: " + e.getMessage());
        }
    }

    /**
     * Returns the set of chunk numbers already stored for the given file, enabling
     * the client to resume an interrupted upload (断点续传).
     */
    @GetMapping("/chunk/verify")
    public ResponseEntity<Set<Integer>> verifyChunk(@RequestParam String fileMd5) {
        Set<Integer> uploadedChunks = getUploadedChunks(fileMd5);
        return ResponseEntity.ok(uploadedChunks);
    }

    /** Scans the chunk directory for {@code fileMd5} and collects the numeric chunk names. */
    private Set<Integer> getUploadedChunks(String fileMd5) {
        Set<Integer> uploadedChunks = new HashSet<>();
        File chunkDir = new File(uploadPath + File.separator + fileMd5);
        if (chunkDir.exists()) {
            File[] files = chunkDir.listFiles();
            if (files != null) {
                for (File file : files) {
                    try {
                        uploadedChunks.add(Integer.parseInt(file.getName()));
                    } catch (NumberFormatException ignored) {
                        // FIX: a stray non-numeric file (editor temp file, .DS_Store)
                        // previously threw and broke the whole verify request; skip it.
                        log.warn("Ignoring non-chunk file: {}", file.getName());
                    }
                }
            }
        }
        return uploadedChunks;
    }

    /** True when the number of stored chunks equals the declared total. */
    private boolean checkIfAllChunksUploaded(ChunkInfo chunkInfo) {
        Set<Integer> uploadedChunks = getUploadedChunks(chunkInfo.getFileMd5());
        return uploadedChunks.size() == chunkInfo.getTotalChunks();
    }

    /**
     * Reports merge progress for the given file; "waiting" with 0% when no merge
     * has been recorded (status entries expire 5 s after the merge ends).
     */
    @GetMapping("/merge/status")
    public ResponseEntity<MergeStatus> getMergeStatus(@RequestParam String fileMd5) {
        MergeStatus status = mergeStatusMap.getOrDefault(fileMd5, new MergeStatus("waiting", 0));
        return ResponseEntity.ok(status);
    }

    /**
     * Concatenates chunks 1..totalChunks into the final file, updating
     * {@code mergeStatusMap} as it goes, then deletes the chunk directory.
     *
     * @throws IOException if a chunk is missing, the target path is illegal,
     *                     or any file operation fails
     */
    private void mergeChunks(ChunkInfo chunkInfo) throws IOException {
        String fileMd5 = chunkInfo.getFileMd5();
        try {
            mergeStatusMap.put(fileMd5, new MergeStatus("merging", 0));
            String chunkDirPath = uploadPath + File.separator + fileMd5;

            // Normalize separators so Windows-style client paths resolve consistently.
            String sanitizedFilename = chunkInfo.getFilename().replace('\\', '/');
            Path basePath = Paths.get(uploadPath).toAbsolutePath().normalize();
            Path targetPath = basePath.resolve(sanitizedFilename).normalize();
            // FIX: the client-supplied filename previously went straight into the
            // target path, allowing "../../x"-style path traversal out of the
            // upload root. Reject anything that escapes the base directory.
            if (!targetPath.startsWith(basePath)) {
                throw new IOException("Illegal filename (path traversal): " + chunkInfo.getFilename());
            }
            File targetDir = targetPath.getParent().toFile();
            // FIX: mkdirs() result checked (was silently ignored).
            if (!targetDir.exists() && !targetDir.mkdirs()) {
                throw new IOException("Cannot create target directory: " + targetDir);
            }
            log.info("Merging chunks to: {}", targetPath);

            int totalChunks = chunkInfo.getTotalChunks();
            int processedChunks = 0;
            try (FileChannel outChannel = new FileOutputStream(targetPath.toFile()).getChannel()) {
                // Chunks are numbered from 1; append them in order.
                for (int i = 1; i <= totalChunks; i++) {
                    File chunk = new File(chunkDirPath + File.separator + i);
                    if (!chunk.exists()) {
                        throw new IOException("Chunk file missing: " + i);
                    }
                    try (FileChannel inChannel = new FileInputStream(chunk).getChannel()) {
                        inChannel.transferTo(0, inChannel.size(), outChannel);
                    }
                    processedChunks++;
                    int progress = (processedChunks * 100) / totalChunks;
                    mergeStatusMap.put(fileMd5, new MergeStatus("merging", progress));
                }
            }
            // FIX: removed the Thread.sleep(100) before cleanup — the channels are
            // already closed by try-with-resources, so the chunks can be deleted at once.
            deleteChunkDir(new File(chunkDirPath));

            mergeStatusMap.put(fileMd5, new MergeStatus("completed", 100));
            log.info("File merged successfully: {}", targetPath);
        } catch (Exception e) {
            mergeStatusMap.put(fileMd5, new MergeStatus("error", 0, e.getMessage()));
            throw e;
        } finally {
            // Expire the status entry 5 s after the merge ends, success or failure.
            taskScheduler.schedule(
                () -> mergeStatusMap.remove(fileMd5),
                Instant.now().plusSeconds(5)
            );
        }
    }

    /** Best-effort removal of a chunk directory and its contents; failures are logged only. */
    private void deleteChunkDir(File dir) {
        if (dir.exists()) {
            File[] files = dir.listFiles();
            if (files != null) {
                for (File file : files) {
                    if (!file.delete()) {
                        log.warn("Failed to delete chunk file: {}", file.getAbsolutePath());
                    }
                }
            }
            if (!dir.delete()) {
                log.warn("Failed to delete chunk directory: {}", dir.getAbsolutePath());
            }
        }
    }
}
JAVA大文件上传源代码
需积分: 0 · 浏览量: 147
更新于2025-01-16
收藏 35KB ZIP 举报
JAVA 大文件上传项目
## 项目简介
这是一个基于 Spring Boot 实现的大文件分片上传项目,支持断点续传、文件秒传、实时进度显示等功能。项目采用前后端分离架构,使用 Thymeleaf 作为模板引擎,实现了高效可靠的文件上传功能。
## 核心特性
1. **分片上传**
- 自动将大文件切分为2MB大小的分片
- 支持并发上传多个分片
- 实时显示上传进度
2. **断点续传**
- 记录已上传分片信息
- 支持暂停/继续上传
- 网络中断自动恢复
3. **文件秒传**
- 基于MD5文件指纹
- 秒级验证文件是否已存在
- 避免重复上传
4. **实时进度**
- 双进度条设计(上传进度和合并进度)
- 精确的时间统计
- 友好的状态提示
## 技术栈
- 后端:Spring Boot 3.4.1
- 前端:HTML5 + JavaScript
- 模板引擎:Thymeleaf
- 文件处理:Apache Commons IO
- 构建工具:Maven


熊文豪
- 粉丝: 7657
- 资源: 7
最新资源
- 26-0222横纵轴归一化说明-1080P 高清-AVC.mp4
- 25-0221多条直线围成代价敏感曲线-1080P 高清-AVC.mp4
- COMSOL光学仿真:光镊_光力模型专题(包含近似算法、张量算法及三个模型).pdf
- COMSOL光学仿真:负折射率BIC与芯片内负折射现象研究.pdf
- COMSOL光学波导传输仿真:三维弯曲、模场分布与波束包络方法及其FDTD计算模式弯曲损耗.pdf
- COMSOL光学仿真:光镊_光力模型专题(近似算法、张量算法及三个模型).pdf
- COMSOL光学仿真:光子晶体光纤、微纳光学及滤波器等的研究与复现.pdf
- COMSOL光学仿真:光子晶体光纤、基于SPR的光纤传感器与模式分析研究.pdf
- 24-0220实例说明一个阈值对应一条直线-1080P 高清-AVC.mp4
- COMSOL光学仿真:轨道自旋锁定手性BIC的能带简并与Chiral BIC的区分,圆场偏振矢量及椭圆率计算模型复现及可视化.pdf
- COMSOL光学仿真:轨道自旋锁定手性双曲线(BIC)的能带简并与区分chiral BIC,圆场偏振矢量及椭圆率计算模型.pdf
- COMSOL光学仿真:光子晶体光纤与COMLOS微纳光学仿真研究.pdf
- Comsol光学仿真:连续域束缚态BIC能带、Q因子、远场及角分辨率透射光谱计算.pdf
- COMSOL光学仿真:介电常数近零薄膜等离子体谐振折射率传感.pdf
- Comsol光学仿真:连续域束缚态BIC的te、tm模式耦合与透射光谱远场偏振矢量的导出数据计算.pdf
- 27-0223假设检验目的-1080P 高清-AVC.mp4