Project scenario:
Some report-related features need to import Excel data. An earlier version I wrote sent the rows to the backend one at a time, which made the transfer far too slow, so I put together an optimization based on material found online.
Problem description:
The import is slow.
Cause analysis:
Sending and inserting the data row by row takes too long.
Solution:
Optimize both the frontend and the backend.
Frontend:
1. Use the Element UI components
<el-button type="warning" icon="el-icon-folder-add" style="margin-left: 180px;" :disabled="disable" @click="submit">Submit file</el-button>
<div style="margin-left: 260px; margin-top: -40px">
  <el-upload
    action
    :on-change="handle"
    :auto-upload="false"
    :show-file-list="false"
    accept=".xls, .xlsx"
  >
    <el-button type="primary" icon="el-icon-upload">Click to upload</el-button>
  </el-upload>
</div>
2. In the JS part, the change handler calls the self-written helper functions to parse the file
async handle(ev) {
    let file = ev.raw;
    if (!file) return;
    let loadingInstance = Loading.service({
        text: "Loading...",
        background: 'rgba(0,0,0,.5)'
    })
    await delay(1000);
    // read the file and parse the first worksheet
    let data = await readFile(file);
    let workbook = this.XLSX.read(data, {type: "binary"}),
        worksheet = workbook.Sheets[workbook.SheetNames[0]]
    data = this.XLSX.utils.sheet_to_json(worksheet);
    /**
     * Convert the parsed rows into the structure the backend expects,
     * using the character map (Excel header text -> field key)
     * @type {*[]}
     */
    let arr = [];
    data.forEach(item => {
        let obj = {};
        for (let key in character) {
            if (!character.hasOwnProperty(key)) continue;
            let v = character[key],
                text = v.text,
                type = v.type;
            v = item[text] || "";
            type === "string" ? v = (String(v)) : null;
            type === "number" ? v = (Number(v) * 100).toFixed(2) : null;
            type === "int" ? v = Math.round(Number(v)) : null;
            type === "time" ? v = convertToStandardTime(v) : null;
            obj[key] = v;
        }
        arr.push(obj);
    })
    await delay(100)
    this.tableData = arr;
    loadingInstance.close();
    this.disable = false;
    this.$message({
        message: 'Upload successful!',
        type: 'success',
        showClose: true
    });
},
The helper JS file: just define the fields that need to be parsed. The backend could also receive them with an entity class; I did the mapping directly on the frontend.
// Read the file as a binary string
export function readFile(file) {
    return new Promise(resolve => {
        let reader = new FileReader();
        reader.readAsBinaryString(file);
        reader.onload = ev => {
            resolve(ev.target.result);
        }
    })
}

// Async delay helper
export function delay(interval = 0) {
    return new Promise(resolve => {
        let timer = setTimeout(_ => {
            clearTimeout(timer);
            resolve();
        }, interval)
    })
}

// Normalize an Excel cell value into a standard date string
export function convertToStandardTime(v) {
    let date;
    // Check whether the input is an Excel date serial number (days)
    if (typeof v === 'number' && v > 25569) {
        // Excel counts days from 1900-01-01; subtracting 25569 days converts the value to a Unix timestamp
        date = new Date((v - 25569) * 86400 * 1000);
    } else {
        // Otherwise assume the input is a regular date string
        date = new Date(v);
    }
    // Format the date with toLocaleString
    const standardTime = date.toLocaleString('zh-CN', {
        year: 'numeric',
        month: '2-digit',
        day: '2-digit',
        // hour: '2-digit',
        // minute: '2-digit',
        // second: '2-digit',
        hour12: false
    });
    return standardTime;
}
// The text values must match the Excel column headers exactly, so they stay in Chinese
export let character = {
    YEAR: {
        text: "年份",                              // year
        type: 'string'
    },
    CYCLE: {
        text: '周期 季度1q、2q 月度01m、02m.. ',     // cycle: quarters 1q/2q..., months 01m/02m...
        type: 'string'
    },
    NXBUDGET: {
        text: '农险预算值',                         // agricultural insurance budget value
        type: 'int'
    },
    COMCODE: {
        text: '机构代码',                           // organization code
        type: 'string'
    },
    COMNAME: {
        text: '机构名称',                           // organization name
        type: 'string'
    },
    NEWCHNLTYPE: {
        text: '清分后渠道类型',                     // channel type after clearing
        type: 'string'
    },
    BUDGET: {
        text: '预算值',                             // budget value
        type: 'int'
    },
}
3. The submit part:
async submit() {
    if (this.tableData.length <= 0) {
        this.$message({message: 'Please select an Excel file first!', type: 'warning', showClose: true});
        return;
    }
    // make sure a target table has been chosen
    if (!this.tableNames) {
        this.$message({message: 'Please select the target table first', type: 'warning', showClose: true});
        return;
    }
    // confirmation prompt for a specific table
    if (this.tableNames === 'CONNECTION') {
        try {
            await this.$confirm(
                'This operation only imports data from the last two years; older data will not take effect. Continue?',
                'Notice',
                {confirmButtonText: 'OK', cancelButtonText: 'Cancel', type: 'warning', center: true}
            );
        } catch (e) {
            this.$message({type: 'info', message: 'Import cancelled!'});
            return;
        }
    }
    // show the loading state
    this.disable = true;
    const loadingInstance = Loading.service({text: "Uploading data", background: 'rgba(0,0,0,.5)'});
    try {
        // build the request body with the table name and all rows
        const requestData = {
            tableNames: this.tableNames,
            data: this.tableData // the whole array is sent in a single request
        };
        // send the POST request
        const response = await this.$axios.post('xxxx/aaaaa', requestData);
        if (parseInt(response.code) === 200) {
            this.$message({message: 'Data transfer complete!', type: 'success', showClose: true});
        } else {
            this.$message({message: response.msg || 'Upload failed', type: 'error', showClose: true});
        }
    } catch (error) {
        console.error('Data error:', error);
        this.$message({message: "Upload failed, please check the data format or the network connection!", type: 'error', showClose: true});
    } finally {
        // close the loading state whether the request succeeded or failed
        this.disable = false;
        loadingInstance.close();
    }
},
Backend:
1. Controller layer
@PostMapping("/xxxx")
public AjaxResult importAllCCSData(@RequestBody ExcelImportRequest request) {
    try {
        List<Map<String, Object>> excelData = request.getData();
        DataSourceUtil.setDB("db2");
        importDataService.setCCSDatas(excelData);
        // additional batch-processing logic could be added here
        return AjaxResult.success("Data received successfully");
    } catch (Exception e) {
        e.printStackTrace();
        return AjaxResult.error("Failed to receive data: " + e.getMessage());
    }
}
2. VO layer: the entity class that the controller receives
public class ExcelImportRequest {
    private String tableNames;
    private List<Map<String, Object>> data; // all rows parsed from the Excel file

    // getters and setters
    public String getTableNames() {
        return tableNames;
    }

    public void setTableNames(String tableNames) {
        this.tableNames = tableNames;
    }

    public List<Map<String, Object>> getData() {
        return data;
    }

    public void setData(List<Map<String, Object>> data) {
        this.data = data;
    }
}
3. Service layer:
Backend key point: define a thread pool configuration class and inject it into the service implementation.
ThreadPoolConfig
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

@Configuration
public class ThreadPoolConfig {

    @Bean(name = "batchInsertPool")
    public ThreadPoolExecutor batchInsertPool() {
        int corePoolSize = Runtime.getRuntime().availableProcessors(); // core threads = CPU cores
        int maxPoolSize = corePoolSize * 2;                            // max threads = CPU cores * 2
        long keepAliveTime = 60;                                       // idle thread keep-alive time
        return new ThreadPoolExecutor(
                corePoolSize,
                maxPoolSize,
                keepAliveTime,
                TimeUnit.SECONDS,
                new LinkedBlockingQueue<>(1000),           // task queue capacity
                new ThreadPoolExecutor.CallerRunsPolicy()  // when the queue is full, the calling thread runs the task
        );
    }
}
Inject batchInsertPool, keep a thread-safe counter (AtomicInteger) for the number of inserted rows, and split the data into chunks of BATCH_SIZE so that each chunk is inserted as one batch; a sketch of the fields the implementation relies on follows the interface method below.
void setCCSDatas(List<Map<String, Object>> excelData) throws InterruptedException;
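The implementation below uses batchInsertPool, mapper, totalCount and BATCH_SIZE without showing where they come from. Here is a minimal sketch of those fields; the class name, the mapper type and the BATCH_SIZE value are assumptions rather than taken from the original project:

import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Resource;
import org.springframework.stereotype.Service;

@Service
public class ImportDataServiceImpl implements ImportDataService { // class/interface names assumed

    // the pool defined in ThreadPoolConfig above, injected by bean name
    @Resource(name = "batchInsertPool")
    private ThreadPoolExecutor batchInsertPool;

    // MyBatis mapper assumed to own insertHBSZHConnectionCost / selectTaskData / insertTaskData / updateTaskData
    @Resource
    private ImportDataMapper mapper;

    // thread-safe running total of inserted rows
    private final AtomicInteger totalCount = new AtomicInteger(0);

    // rows per chunk (and per multi-row INSERT); value assumed, tune to your data
    private static final int BATCH_SIZE = 1000;

    // setCCSDatas(...) from the interface above is implemented below
}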
@Override
public void setCCSDatas(List<Map<String, Object>> excelData) throws InterruptedException {
    if (excelData == null || excelData.isEmpty()) {
        return;
    }
    // 1. Initialize the counter from the DB so a restart does not reset the count
    Integer dbCount = mapper.selectTaskData();
    totalCount.set(dbCount != null && dbCount > 0 ? dbCount : 0);

    // 2. Split the big list into smaller chunks
    List<List<Map<String, Object>>> dataChunks = splitDataIntoChunks(excelData, BATCH_SIZE);

    // 3. Use a CountDownLatch to wait for all tasks to finish
    CountDownLatch countDownLatch = new CountDownLatch(dataChunks.size());

    // 4. Submit one batch-insert task per chunk to the thread pool
    for (List<Map<String, Object>> chunk : dataChunks) {
        batchInsertPool.execute(() -> {
            try {
                // the dynamic datasource is thread-bound, so set it again inside the worker thread
                DataSourceUtil.setDB("db2");
                // batch insert the current chunk
                mapper.insertHBSZHConnectionCost(chunk);

                // 5. Atomically add the chunk size to the counter and persist the progress
                totalCount.addAndGet(chunk.size());
                Integer taskData = mapper.selectTaskData();
                if (taskData != null && taskData > 0) {
                    mapper.updateTaskData(totalCount.get());
                } else {
                    // no progress row yet: create one with the current count
                    mapper.insertTaskData(totalCount.get());
                }
            } catch (Exception e) {
                e.printStackTrace();
                // error handling: the failed chunk could be recorded here and retried later
            } finally {
                countDownLatch.countDown(); // this task is done
            }
        });
    }
    // wait for every task to finish before continuing
    countDownLatch.await();
    System.out.println("All data inserted. Total rows inserted: " + totalCount.get());
}
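The catch block above only notes that a failed chunk could be recorded and retried. A minimal sketch of that idea, assuming the same fields as above plus java.util.concurrent.ConcurrentLinkedQueue (none of this is in the original code): add the failed chunk to a thread-safe queue inside the catch block, then retry once on the calling thread after countDownLatch.await() returns.

// assumed addition: thread-safe store for chunks whose insert failed
private final ConcurrentLinkedQueue<List<Map<String, Object>>> failedChunks = new ConcurrentLinkedQueue<>();

// inside the worker's catch block:  failedChunks.add(chunk);

// call this after countDownLatch.await() returns
private void retryFailedChunks() {
    List<Map<String, Object>> chunk;
    while ((chunk = failedChunks.poll()) != null) {
        try {
            DataSourceUtil.setDB("db2");            // re-bind the datasource on this thread as well
            mapper.insertHBSZHConnectionCost(chunk);
            totalCount.addAndGet(chunk.size());     // count the retried rows too
        } catch (Exception e) {
            e.printStackTrace();                    // still failing: log and leave for manual handling
        }
    }
}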
4. The data-chunking code
// Chunking helper: split the big list into sub-lists of at most batchSize rows
private List<List<Map<String, Object>>> splitDataIntoChunks(List<Map<String, Object>> data, int batchSize) {
    List<List<Map<String, Object>>> chunks = new ArrayList<>();
    for (int i = 0; i < data.size(); i += batchSize) {
        int end = Math.min(i + batchSize, data.size());
        chunks.add(data.subList(i, end));
    }
    return chunks;
}
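As a quick illustration of the chunking (hypothetical example, assuming BATCH_SIZE = 1000 and the usual java.util imports): 2,500 rows are split into chunks of 1,000, 1,000 and 500, each of which becomes one pool task. Note that subList returns views of the original list, which is fine here because the source list is not modified afterwards.

// Hypothetical example, not part of the project code
List<Map<String, Object>> excelData = new ArrayList<>();
for (int i = 0; i < 2500; i++) {
    excelData.add(Collections.singletonMap("POLICYNO", "P" + i));
}
List<List<Map<String, Object>>> chunks = splitDataIntoChunks(excelData, 1000);
System.out.println(chunks.size());        // 3
System.out.println(chunks.get(2).size()); // 500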
5. Data (mapper) layer
void insertHBSZHConnectionCost(@Param("list") List<Map<String, Object>> dataList);
<insert id="insertHBSZHConnectionCost">
    <!--
    insert into HBTable (policyNo, docHandFeeRate, NONDOCHANDFEERATE, overallHandFeeRate, NOTES, IMPORTUSER, IMPORTTIME, DATATYPE, ISSUM)
    values (#{data.POLICYNO}, #{data.DOCHANDFEERATE}, #{data.NONDOCHANDFEERATE}, #{data.OVERALLHANDFEERATE}, #{data.NOTES}, #{data.IMPORTUSER}, CURRENT_TIMESTAMP, #{data.DATATYPE}, #{data.ISSUM})
    -->
    insert into HB_SZH_CONNECTION_COST
        (policyno, dochandfeerate, nondochandfeerate, overallhandfeerate,
         notes, importuser, importtime, datatype, issum)
    values
    <foreach collection="list" item="data" separator=",">
        (#{data.POLICYNO}, #{data.DOCHANDFEERATE}, #{data.NONDOCHANDFEERATE},
         #{data.OVERALLHANDFEERATE}, #{data.NOTES}, #{data.IMPORTUSER},
         CURRENT_TIMESTAMP, #{data.DATATYPE}, #{data.ISSUM})
    </foreach>
</insert>

That wraps up the basic functionality.