背景:上游系统有15万+生产商品数据要导入到我们系统(只同步一次),这个数据需要各个服务owner修改完价格之后一一确认之后才能导入,既然是多个人经手改,模板就可能会变动,使用excel导入的方式同步到系统 存在以下问题
1.模板可能改动,导入字段映射需要跟着改变
2.导入分批插入,事务无法保证
3.只使用一次的不应该作为功能,否则管理员可能误操作导致数据问题
权衡之后决定通过解析Excel将数据转换之后再生成SQL插入语句,这样业务Excel模板变动了,直接在本地调整映射关系,更灵活
实现方式:基于阿里开源框架EasyExcel(导入导出性能极优)解析Excel,使用反射生成SQL语句
1.引入依赖包
<dependencies> <dependency> <groupId>cn.hutool</groupId> <artifactId>hutool-all</artifactId> <version>5.8.5</version> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>easyexcel</artifactId> <version>3.1.5</version> </dependency> <dependency> <groupId>com.alibaba</groupId> <artifactId>fastjson</artifactId> <version>2.0.22</version> </dependency> </dependencies>
2.解析Excel分批生成SQL
package com.lhl.http.com.lhl.sql;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.lang.Snowflake;
import cn.hutool.core.util.IdUtil;
import cn.hutool.core.util.ReflectUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.excel.EasyExcel;
import com.alibaba.excel.read.listener.PageReadListener;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
public class GenerateSql {

    /**
     * Shared snowflake id generator: one instance for the whole run,
     * instead of fetching a generator on every call to getId().
     */
    private static final Snowflake SNOWFLAKE = IdUtil.getSnowflake(1, 1);

    /**
     * Excel column index -> Goods field name mapping.
     * Adjust this map locally when the business template changes.
     * Values must match the Goods field names exactly (snake_case),
     * because getInsertSql uses field names verbatim as column names.
     * The original mapped to camelCase names (goodName, createBy, createDate),
     * which never matched the snake_case fields, so those columns came out null.
     */
    private static final Map<Integer, String> indexColumnMap = new HashMap<>();

    static {
        int index = 0;
        indexColumnMap.put(index++, "good_name");
        indexColumnMap.put(index++, "price");
        indexColumnMap.put(index++, "create_by");
        indexColumnMap.put(index++, "create_date");
    }

    public static void main(String[] args) throws IOException {
        String excelPath = "D:\\商品信息.xlsx";
        String sqlPth = "d:\\t_goods.sql";
        parseExcelConvertSql(excelPath, sqlPth);
    }

    /**
     * Reads the Excel file in batches (EasyExcel PageReadListener, default 100 rows
     * per batch) and appends one INSERT statement per row to the target sql file.
     *
     * @param excelFilePath source .xlsx path
     * @param sqlFilePath   output .sql path (overwritten if it exists)
     * @throws IOException if the output file cannot be opened or closed
     */
    public static void parseExcelConvertSql(String excelFilePath, String sqlFilePath) throws IOException {
        // try-with-resources: the original never closed the writer, leaking the
        // handle and risking a lost buffer tail on an early exit
        try (BufferedWriter bw = new BufferedWriter(new FileWriter(sqlFilePath))) {
            EasyExcel.read(excelFilePath, new PageReadListener<Map<Integer, String>>(dataList -> {
                dataList.forEach(data -> {
                    Goods goods = convertMap(data);
                    String sql = getInsertSql("t_goods", Goods.class, goods);
                    try {
                        // no per-row flush needed; close() flushes the buffer once
                        bw.write(sql + ";\n");
                    } catch (IOException e) {
                        throw new RuntimeException("failed writing sql file: " + sqlFilePath, e);
                    }
                });
            })).sheet().doRead();
        }
    }

    /**
     * Builds an INSERT statement for the given bean via reflection.
     * Field names are used verbatim as column names; String values have embedded
     * single quotes doubled so a product name containing ' cannot break the SQL.
     * Null values are emitted as the literal null (SQL NULL).
     */
    public static <T> String getInsertSql(String tableName, Class<T> clazz, T t) {
        Field[] fields = ReflectUtil.getFields(clazz);
        StringBuilder columns = new StringBuilder("insert into " + tableName + "(");
        StringBuilder values = new StringBuilder("values(");
        for (Field field : fields) {
            // read each field value once instead of twice per field
            Object value = ReflectUtil.getFieldValue(t, field.getName());
            columns.append(field.getName()).append(",");
            if (value instanceof String) {
                values.append("'").append(((String) value).replace("'", "''")).append("',");
            } else {
                values.append(value).append(",");
            }
        }
        return StrUtil.removeSuffix(columns, ",") + ")"
                + StrUtil.removeSuffix(values, ",") + ")";
    }

    /**
     * Converts one Excel row (column index -> cell text) into a Goods bean via
     * indexColumnMap, and assigns a fresh snowflake id.
     */
    private static Goods convertMap(Map<Integer, String> data) {
        Map<String, Object> map = new HashMap<>();
        indexColumnMap.forEach((index, fieldName) -> map.put(fieldName, data.get(index)));
        map.put("id", getId());
        // keys already equal the snake_case field names, so no camelCase
        // conversion (the original's isToCamelCase=true broke the match)
        return BeanUtil.fillBeanWithMap(map, new Goods(), false, true);
    }

    /**
     * Generates a unique snowflake id using the shared generator.
     *
     * @return the id as a decimal string
     */
    private static String getId() {
        return SNOWFLAKE.nextIdStr();
    }

    @Getter
    @Setter
    @NoArgsConstructor
    public static class Goods {
        private String id;
        private String good_name;
        // NOTE(review): double is lossy for money — DECIMAL/BigDecimal would be
        // safer; kept as double to preserve the existing interface
        private double price;
        private String create_by;
        private String create_date;
    }
}
3.生成结果
insert into t_goods(id,good_name,price,create_by,create_date)values('1612839604908593152',null,5300.0,null,null);
insert into t_goods(id,good_name,price,create_by,create_date)values('1612839604916981760',null,5300.0,null,null);
insert into t_goods(id,good_name,price,create_by,create_date)values('1612839604916981761',null,5300.0,null,null);