A simple approach to chunked uploads with OSS storage

The idea:

Frontend:

1. Split the file into chunks and compute the total chunk count
2. Call the backend to initialize the chunk-upload session
3. Query the upload progress
4. Upload the chunks

Backend:

1. Initialize the chunk metadata in Redis (file name / total chunks / number of uploaded chunks / upload time)
2. Check the uploaded chunks (which chunk indexes have already arrived)
3. Upload a chunk (verify it with a SHA-256 checksum)
4. Merge the chunks (check that every chunk has been uploaded; if any are missing, return them to the frontend)
5. Upload the merged file to OSS (not implemented in the controller below; see the sketch after the backend code)
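
On the backend, all per-upload state lives in one Redis hash per file. A sketch of the layout (field names match the controller code further below; the values are illustrative):

file_upload:{fileId}        (Redis hash, expires after 24 hours)
    fileName        -> "report.pdf"
    totalChunks     -> 12
    uploadedChunks  -> 3
    uploadTime      -> 1718000000000
    chunk_0         -> <binary chunk bytes>
    chunk_1         -> <binary chunk bytes>
    chunk_7         -> <binary chunk bytes>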

Frontend code (for reference only):

JavaScript:
// Complete uploadFile function
async function uploadFile() {
    const fileInput = document.getElementById('fileInput');
    const file = fileInput.files[0];
    if (!file) {
        alert('Please select a file');
        return;
    }
    
    const fileName = file.name;
    const chunkSize = 1024 * 10; // 10 KB per chunk (deliberately small, for demonstration)
    const totalChunks = Math.ceil(file.size / chunkSize);
    
    // 1. Initialize the upload session
    const initResponse = await fetch('/hospital/file2/init', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
        },
        body: `fileName=${encodeURIComponent(fileName)}&totalChunks=${totalChunks}`
    });
    
    if (!initResponse.ok) {
        console.error('Initialization failed');
        alert('Upload initialization failed');
        return;
    }
    
    const fileId = await initResponse.text();
    console.log('File ID:', fileId);
    
    // 2. Check the upload progress (this is what makes resuming possible)
    let uploadedChunks = [];
    const progressResponse = await fetch(`/hospital/file2/progress/${fileId}`);
    if (progressResponse.ok) {
        const progressData = await progressResponse.json();
        uploadedChunks = progressData.uploadedChunkIndexes || [];
    }
    console.log(`Uploaded ${uploadedChunks.length}/${totalChunks} chunks`);
    
    // 3. Upload the chunks that are still missing
    for (let i = 0; i < totalChunks; i++) {
        // Stop between chunks if the user paused the upload (see pauseUpload below)
        if (isUploadPaused) {
            console.log('Upload paused');
            return;
        }
        if (uploadedChunks.includes(i)) {
            continue; // already on the server, skip it
        }
        // Up to 3 attempts per chunk before giving up
        let attempts = 0;
        while (true) {
            try {
                await uploadChunk(file, i, fileId, chunkSize);
                console.log(`Chunk ${i} uploaded`);
                break;
            } catch (error) {
                attempts++;
                console.error(`Chunk ${i} upload failed (attempt ${attempts})`, error);
                if (attempts >= 3) {
                    alert(`Chunk ${i} failed to upload, please try again`);
                    return;
                }
            }
        }
    }
    
    // 4. Merge the file on the server
    try {
        const result = await mergeFile(fileId);
        console.log('File uploaded successfully:', result);
        alert('File uploaded successfully');
    } catch (error) {
        console.error('Merge failed', error);
        alert('File merge failed');
    }
    }
}

// Upload a single chunk, with a SHA-256 checksum for server-side verification
async function uploadChunk(file, chunkIndex, fileId, chunkSize) {
    const chunk = file.slice(chunkIndex * chunkSize, 
                            Math.min((chunkIndex + 1) * chunkSize, file.size));
    
    const checksum = await calculateHash(chunk);
    
    const formData = new FormData();
    formData.append('chunk', chunk);
    formData.append('chunkIndex', chunkIndex);
    formData.append('chunkChecksum', checksum);
    formData.append('fileId', fileId);
    
    const response = await fetch('/hospital/file2/upload', {
        method: 'POST',
        body: formData
    });
    
    if (!response.ok) {
        const errorText = await response.text();
        throw new Error(`Upload failed: ${errorText}`);
    }
    
    return await response.json();
}

// Ask the server to merge the uploaded chunks
async function mergeFile(fileId) {
    const response = await fetch('/hospital/file2/merge', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
        },
        body: `fileId=${fileId}`
    });
    
    if (!response.ok) {
        if (response.status === 400) {
            // If chunks are missing, the server returns the list of missing chunk indexes
            const missingChunks = await response.json();
            throw new Error(`Missing chunks: ${JSON.stringify(missingChunks)}`);
        }
        const errorText = await response.text();
        throw new Error(`Merge failed: ${errorText}`);
    }
    
    return await response.json();
}

// Compute the SHA-256 hash of a file chunk
// (crypto.subtle is only available in secure contexts, i.e. HTTPS or localhost)
function calculateHash(chunk) {
    return new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.readAsArrayBuffer(chunk);
        reader.onload = async () => {
            try {
                const arrayBuffer = reader.result;
                const hashBuffer = await crypto.subtle.digest('SHA-256', arrayBuffer);
                const hashArray = Array.from(new Uint8Array(hashBuffer));
                const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
                resolve(hashHex);
            } catch (error) {
                reject(error);
            }
        };
        reader.onerror = () => {
            reject(new Error('Failed to read the file chunk'));
        };
    });
}

// Query the upload progress
async function checkProgress(fileId) {
    try {
        const response = await fetch(`/hospital/file2/progress/${fileId}`);
        if (response.ok) {
            const progressData = await response.json();
            return progressData;
        }
    } catch (error) {
        console.error('Failed to query progress', error);
    }
    return null;
}

// Pause flag checked between chunks by the upload loop in uploadFile
let isUploadPaused = false;

// Pause the upload (example stub)
function pauseUpload() {
    isUploadPaused = true;
}

// Resume the upload (example stub)
async function resumeUpload(fileId) {
    isUploadPaused = false;
    const progressData = await checkProgress(fileId);
    if (progressData) {
        console.log('Resuming upload, progress:', progressData.progress + '%');
        // Continue uploading based on the progress info
    }
}

Backend code (for reference only):

Java:
    @RestController
    @RequestMapping("/file2")
    @Slf4j
    public class File2Controller {
        private static final String FILE_UPLOAD_PREFIX = "file_upload:";

        @Autowired
        private RedisTemplate<String, Object> redisTemplate;
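        // Note (an assumption of this sketch): uploadFile() below increments a counter
        // field via HINCRBY, which requires that field to be stored as a plain integer
        // string, while the chunk_<n> fields hold raw bytes. The template's hash
        // serializers must be configured so that both kinds of value round-trip.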

        @Value("${my.config.savePath}")
        private String uploadPath;

        /**
         * Initialize the chunk-upload metadata
         * @param fileName    the original file name
         * @param totalChunks the total number of chunks
         * @return the generated fileId for this upload session
         */
        @PostMapping("/init")
        public ResponseEntity<?> initUpload(@RequestParam("fileName") String fileName,
                                            @RequestParam("totalChunks") Integer totalChunks) {
            String fileId = UUID.randomUUID().toString();
            String key = FILE_UPLOAD_PREFIX + fileId;

            // Store the file metadata
            Map<String, Object> metadata = new HashMap<>();
            metadata.put("fileName", fileName); // file name
            metadata.put("totalChunks", totalChunks); // total number of chunks
            metadata.put("uploadTime", System.currentTimeMillis()); // upload time
            metadata.put("uploadedChunks", 0); // number of chunks uploaded so far

            redisTemplate.opsForHash().putAll(key, metadata);
            // Set an expiry so abandoned uploads get cleaned up, e.g. 24 hours
            redisTemplate.expire(key, 24, TimeUnit.HOURS);

            return ResponseEntity.ok(fileId);
        }

        /**
         * Get the upload progress
         * @param fileId the upload session id
         * @return total/uploaded chunk counts, uploaded chunk indexes, and a percentage
         */
        @GetMapping("/progress/{fileId}")
        public ResponseEntity<?> getUploadProgress(@PathVariable String fileId) {
            String key = FILE_UPLOAD_PREFIX + fileId;

            if (!Boolean.TRUE.equals(redisTemplate.hasKey(key))) {
                return ResponseEntity.status(HttpStatus.NOT_FOUND)
                        .body("File not found");
            }

            Map<Object, Object> metadata = redisTemplate.opsForHash().entries(key);
            Integer totalChunks = (Integer) metadata.get("totalChunks");
            Integer uploadedChunks = (Integer) metadata.get("uploadedChunks");

            // Collect the indexes of the chunks that have been uploaded
            List<Integer> uploadedChunkIndexes = new ArrayList<>();
            Set<Object> keys = redisTemplate.opsForHash().keys(key);
            for (Object k : keys) {
                if (k.toString().startsWith("chunk_")) {
                    uploadedChunkIndexes.add(Integer.parseInt(k.toString().substring(6)));
                }
            }

            Map<String, Object> response = new HashMap<>();
            response.put("totalChunks", totalChunks); // 总分片数
            response.put("uploadedChunks", uploadedChunks); // 已上传分片数
            response.put("uploadedChunkIndexes", uploadedChunkIndexes); // 已上传分片索引(第几个分片)
            response.put("progress", (uploadedChunks * 100.0) / totalChunks); // 上传进度

            return ResponseEntity.ok(response);
        }

        /**
         * Upload a chunk
         * @param chunk         the chunk file
         * @param chunkIndex    the chunk index (which chunk this is)
         * @param chunkChecksum the SHA-256 checksum computed on the frontend
         * @param fileId        the upload session id
         * @return the uploaded chunk indexes so far
         */
        @PostMapping("/upload")
        public ResponseEntity<?> uploadFile(@RequestParam("chunk") MultipartFile chunk, // 分片文件
                                            @RequestParam("chunkIndex") Integer chunkIndex, // 分片索引(第几个分片)
                                            @RequestParam("chunkChecksum") String chunkChecksum, // 前端SHA-256计算后的对比值
                                            @RequestParam("fileId") String fileId) throws Exception {

            String key = FILE_UPLOAD_PREFIX + fileId;

            // Check that the upload session exists
            if (!Boolean.TRUE.equals(redisTemplate.hasKey(key))) {
                return ResponseEntity.status(HttpStatus.NOT_FOUND)
                        .body("File not initialized or expired");
            }

            byte[] chunkBytes = chunk.getBytes();
            String actualChecksum = calculateHash(chunkBytes);

            // Reject the chunk if it does not match the checksum computed on the frontend
            if (!chunkChecksum.equals(actualChecksum)) {
                return ResponseEntity.status(HttpStatus.BAD_REQUEST)
                        .body("Chunk checksum does not match");
            }

            // Store the chunk content (keeping raw chunk bytes in Redis only suits
            // small files; large files belong on disk or in an OSS multipart upload);
            // putIfAbsent keeps a re-uploaded chunk from being double-counted below
            Boolean firstUpload = redisTemplate.opsForHash()
                    .putIfAbsent(key, "chunk_" + chunkIndex, chunkBytes);

            // Update the uploaded-chunk counter only the first time this index arrives
            if (Boolean.TRUE.equals(firstUpload)) {
                redisTemplate.opsForHash().increment(key, "uploadedChunks", 1);
            }

            // Collect the list of uploaded chunk indexes for the response
            List<Integer> uploadedChunks = new ArrayList<>();
            Set<Object> keys = redisTemplate.opsForHash().keys(key);
            for (Object k : keys) {
                if (k.toString().startsWith("chunk_")) {
                    uploadedChunks.add(Integer.parseInt(k.toString().substring(6)));
                }
            }

            Map<String, Object> response = new HashMap<>();
            response.put("fileId", fileId);
            response.put("uploadedChunks", uploadedChunks);

            return ResponseEntity.ok(response);
        }


        /**
         * Merge the chunks into the final file
         * @param fileId the upload session id
         * @return the download URL, or the list of missing chunk indexes (HTTP 400)
         * @throws IOException if writing the merged file fails
         */
        @PostMapping("/merge")
        public ResponseEntity<?> mergeFile(@RequestParam("fileId") String fileId) throws IOException {
            String key = FILE_UPLOAD_PREFIX + fileId;

            Map<Object, Object> allData = redisTemplate.opsForHash().entries(key);

            if (allData.isEmpty()) {
                return ResponseEntity.status(HttpStatus.NOT_FOUND)
                        .body("File not found");
            }

            String fileName = (String) allData.get("fileName");
            Integer totalChunks = (Integer) allData.get("totalChunks");

            // Check whether every chunk has been uploaded
            List<Integer> missingChunks = new ArrayList<>();
            for (int i = 0; i < totalChunks; i++) {
                if (!allData.containsKey("chunk_" + i)) {
                    missingChunks.add(i);
                }
            }

            if (!missingChunks.isEmpty()) {
                return ResponseEntity.status(HttpStatus.BAD_REQUEST)
                        .body(missingChunks);
            }

            // Merge the chunks into the final file (note: fileName comes from the
            // client, so a real implementation should sanitize it against path traversal)
            File outputFile = new File(uploadPath, fileName);
            try (FileOutputStream fos = new FileOutputStream(outputFile)) {
                for (int i = 0; i < totalChunks; i++) {
                    byte[] chunkData = (byte[]) allData.get("chunk_" + i);
                    fos.write(chunkData);
                }
            }

            // Clean up the Redis data
            redisTemplate.delete(key);

            return ResponseEntity.ok()
                    .body(Collections.singletonMap("url", "/download/" + fileName));
        }

        /**
         * Compute the SHA-256 hash of a chunk
         * @param fileChunk the chunk bytes
         * @return the hash as a lowercase hex string
         * @throws Exception if the SHA-256 algorithm is unavailable
         */
        public static String calculateHash(byte[] fileChunk) throws Exception {
            MessageDigest md = MessageDigest.getInstance("SHA-256");
            md.update(fileChunk);
            byte[] hash = md.digest();
            StringBuilder hexString = new StringBuilder();
            for (byte b : hash) {
                hexString.append(String.format("%02x", b));
            }
            return hexString.toString();
        }
    }
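
The controller above stops at a merged file on local disk; step 5 of the outline (pushing it to OSS) is not shown. Below is a minimal sketch using the Aliyun OSS Java SDK (com.aliyun:aliyun-sdk-oss), assuming endpoint, accessKeyId, accessKeySecret and bucketName come from configuration; those names are illustrative, not part of the original sample:

Java:

import com.aliyun.oss.OSS;
import com.aliyun.oss.OSSClientBuilder;
import java.io.File;

public class OssUploadStep {

    // Sketch of step 5: push the merged file to OSS and return its object key.
    // All parameter names here are illustrative; retries and error mapping are omitted.
    public static String uploadToOss(File mergedFile, String endpoint, String accessKeyId,
                                     String accessKeySecret, String bucketName) {
        OSS ossClient = new OSSClientBuilder().build(endpoint, accessKeyId, accessKeySecret);
        try {
            String objectName = "uploads/" + mergedFile.getName();
            // A simple PUT is enough for modest files; for very large merged files,
            // the SDK's own multipart-upload API is the better fit.
            ossClient.putObject(bucketName, objectName, mergedFile);
            return objectName;
        } finally {
            // Release the client's connection pool
            ossClient.shutdown();
        }
    }
}

With something like this in place, mergeFile could call uploadToOss(outputFile, ...) just before deleting the Redis key, and return the OSS object URL instead of the local /download path.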