I. Background
In real-world systems, a single business table that grows too large starts to hurt functionality and performance, yet the business as a whole may not justify splitting the database. Dynamic table sharding is then a reasonable option: data is stored across multiple tables split along some dimension and queried dynamically. This post implements dynamic insert, delete, and query for a single logical table on top of Spring Boot. The sharding rule is derived from a fixed id format that embeds a timestamp, so the table holding any given record can be resolved from its id, and every insert, delete, and query builds on that format. Inserting or querying a single record performs fine, but querying across all data requires a UNION over every shard table, which does have performance problems. This is only a demo, for reference.
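To make the convention concrete, here is a minimal sketch of how the demo maps a record to its shard table (id prefix yyyyMMddHHmmssSSS plus the system code tsl plus a random code, table suffix yyyyMMddHHmm, exactly as used in the service layer below); the class and method names here are illustrative only and are not part of the project.

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Random;

// Illustrative helper (not part of the project): derives the id and table name
// from the same time-based format the demo uses.
public class ShardConvention {

    // Table name: "log" + yyyyMMddHHmm, i.e. one table per minute in this demo.
    static String tableNameFor(Date time) {
        return "log" + new SimpleDateFormat("yyyyMMddHHmm").format(time);
    }

    // Id: yyyyMMddHHmmssSSS + system code "tsl" + zero-padded random code.
    static String newId(Date time) {
        String random = String.format("%012d", new Random().nextInt(999999999));
        return new SimpleDateFormat("yyyyMMddHHmmssSSS").format(time) + "tsl" + random;
    }

    // Routing: the first 12 characters of the id are exactly the table suffix.
    static String tableNameFor(String id) {
        return "log" + id.substring(0, 12);
    }

    public static void main(String[] args) {
        Date now = new Date();
        String id = newId(now);
        // Both calls resolve to the same table, e.g. log202304101532.
        System.out.println(tableNameFor(now) + " / " + tableNameFor(id));
    }
}

Because the first 12 characters of the id equal the table suffix, a single record can always be written to and read from the correct table without any extra lookup.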
II. Results
(1) Inserting data
Request
Runtime log: a table is created according to the time format and the record is inserted
The resulting table and rows in the database
(2) Deleting data
Request
Runtime log
(3) Querying
Query by id: as with deletion, the target table is resolved from the id format.
Paged query: all shard tables are combined with UNION, which is not efficient; real scenarios should adapt this to the business.
III. Implementation Steps
1. Controller
package com.cpl.tsl.controller;

import com.cpl.tsl.bean.Log;
import com.cpl.tsl.bean.base.Page;
import com.cpl.tsl.bean.base.ResultMap;
import com.cpl.tsl.service.LogService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.web.bind.annotation.*;

import javax.annotation.Resource;
import java.io.IOException;

@RestController
@RequestMapping("/log")
@Api(tags = "日志模块")
public class LogController {

    @Resource
    LogService logService;

    // Add a log record
    @RequestMapping(value = "/", method = RequestMethod.POST)
    @ApiOperation(value = "增加日志", notes = "增加日志信息")
    public ResultMap saveLog(@RequestBody @ApiParam(required = true, value = "日志") Log log) {
        ResultMap resultMap = new ResultMap();
        try {
            logService.saveLog(log);
            resultMap.setStatus("200");
            resultMap.setMessage("保存成功");
        } catch (Exception e) {
            resultMap.setStatus("500");
            resultMap.setMessage("保存失败");
        }
        return resultMap;
    }

    // Delete log records by a comma-separated id string
    @DeleteMapping("/{ids}")
    @ApiOperation(value = "删除日志", notes = "删除日志")
    public ResultMap delLog(@PathVariable("ids") @ApiParam(required = true, value = "逗号分隔id字符串") String ids) {
        ResultMap resultMap = new ResultMap();
        try {
            logService.delLog(ids);
            resultMap.setStatus("200");
            resultMap.setMessage("删除成功");
        } catch (Exception e) {
            resultMap.setStatus("500");
            resultMap.setMessage("删除失败");
        }
        return resultMap;
    }

    // Query a log record by id
    @GetMapping("/{id}")
    @ApiOperation(value = "查询日志", notes = "查询日志")
    public ResultMap getLog(@PathVariable("id") @ApiParam(required = true, value = "id") String id) throws IOException {
        ResultMap resultMap = new ResultMap();
        resultMap.setData(logService.getLogById(id));
        resultMap.setStatus("200");
        resultMap.setMessage("成功");
        return resultMap;
    }

    // Paged query over all log records
    @GetMapping("/getPage")
    @ApiOperation(value = "查询所有的日志带分页", notes = "查询所有的日志带分页")
    public Page<Log> getLogPage(
            @ApiParam(name = "pageSize", value = "每页大小", required = true) @RequestParam Integer pageSize,
            @ApiParam(name = "pageNo", value = "开始页", required = true) @RequestParam Integer pageNo) {
        return logService.getPage(pageSize, pageNo);
    }
}
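For reference, a hedged sketch of calling the add-log endpoint above once the application is running; the host and port (localhost:8080) and the JSON field values are assumptions for the example, and the expected response fields follow from ResultMap.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Minimal client-side sketch (assumes the app runs on localhost:8080).
public class AddLogExample {
    public static void main(String[] args) throws Exception {
        // Fields match the Log bean below; the values are made up for the example.
        String json = "{\"action\":\"login\",\"creator\":\"admin\",\"creatorId\":\"1\",\"module\":\"auth\",\"flag\":\"1\"}";
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/log/"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(json))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // On success the controller returns status "200" and message "保存成功".
        System.out.println(response.body());
    }
}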
2. Service interface
package com.cpl.tsl.service;

import com.cpl.tsl.bean.Log;
import com.cpl.tsl.bean.base.Page;
import org.springframework.transaction.annotation.Transactional;

/**
 * Service for the log module.
 */
@Transactional
public interface LogService {

    void saveLog(Log log);

    void delLog(String ids);

    Log getLogById(String id);

    Page<Log> getPage(int pageSize, int pageNo);
}
3. Service implementation
package com.cpl.tsl.service.Impl;

import com.cpl.tsl.bean.Log;
import com.cpl.tsl.bean.Tables;
import com.cpl.tsl.bean.base.Page;
import com.cpl.tsl.dao.LogDao;
import com.cpl.tsl.service.LogService;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Random;

/**
 * <p>
 * Service implementation.
 * </p>
 *
 * @author lll
 * @since 2023-04-10
 */
@Service
public class LogServiceImpl implements LogService {

    @Resource
    private LogDao logDao;

    /**
     * Create shard tables dynamically at minute granularity based on the current time.
     */
    @Override
    public void saveLog(Log log) {
        // Current time.
        Date nowTime = new Date();
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmm");
        String nowMinute = dateFormat.format(nowTime);
        // Check whether a table already exists for the current minute.
        String logTable = "log" + nowMinute;
        Integer table = logDao.queryTableByName(logTable);
        // Create the table if it does not exist yet.
        if (table == 0) {
            logDao.createTable(logTable);
        }
        // Insert the row.
        // Id layout: time (yyyyMMddHHmmssSSS) + system code "tsl" + random code.
        SimpleDateFormat dateFormatForId = new SimpleDateFormat("yyyyMMddHHmmssSSS");
        Random r = new Random();
        String random = String.format("%012d", r.nextInt(999999999));
        log.setId(dateFormatForId.format(nowTime) + "tsl" + random);
        logDao.saveLog(logTable, log);
    }

    @Override
    public void delLog(String ids) {
        String[] idArr = ids.split(",");
        for (int i = 0; i < idArr.length; i++) {
            // The first 12 characters of the id (yyyyMMddHHmm) give the table name.
            String tableName = "log" + idArr[i].substring(0, 12);
            // Delete by table name and id.
            logDao.deleteByIdAndTableName(idArr[i], tableName);
        }
    }

    @Override
    public Log getLogById(String id) {
        // The first 12 characters of the id (yyyyMMddHHmm) give the table name.
        String tableName = "log" + id.substring(0, 12);
        // Query by table name and id.
        return logDao.queryByIdAndTableName(id, tableName);
    }

    @Override
    public Page<Log> getPage(int pageSize, int pageNo) {
        // Look up all existing log shard tables.
        List<Tables> logList = logDao.getAllLogTable();
        if (logList != null && logList.size() > 0) {
            // Paged query across all shard tables.
            int page = (pageNo - 1) * pageSize;
            int logCount = logDao.getLogCount(logList);
            if (logCount > 0) {
                Page<Log> logPage = new Page<>();
                List<Log> logs = logDao.getPage(logList, pageSize, page);
                logPage.setRows(logs);
                logPage.setPageSize(pageSize);
                logPage.setCurrentPage(pageNo);
                logPage.setTotalCount(logCount);
                // Round up to get the total number of pages.
                logPage.setTotalPage((logCount + pageSize - 1) / pageSize);
                return logPage;
            } else {
                return new Page<>();
            }
        }
        return new Page<>();
    }
}
4. DAO (mapper interface)
package com.cpl.tsl.dao;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.cpl.tsl.bean.Log;
import com.cpl.tsl.bean.Tables;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * <p>
 * Mapper interface.
 * </p>
 *
 * @author lll
 * @since 2023-04-10
 */
public interface LogDao extends BaseMapper<Log> {

    int queryTableByName(@Param("tableName") String tableName);

    int createTable(@Param("tableName") String tableName);

    void saveLog(@Param("tableName") String tableName, @Param("log") Log log);

    void deleteByIdAndTableName(@Param("id") String id, @Param("tableName") String tableName);

    Log queryByIdAndTableName(@Param("id") String id, @Param("tableName") String tableName);

    List<Tables> getAllLogTable();

    List<Log> getPage(@Param("logList") List<Tables> logList, @Param("pageSize") int pageSize, @Param("page") int page);

    int getLogCount(@Param("logList") List<Tables> logList);
}
5. Mapper XML for the DAO interface
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.cpl.tsl.dao.LogDao">

    <insert id="createTable" parameterType="String">
        CREATE TABLE `${tableName}` (
            `id` varchar(255) NOT NULL COMMENT '主键',
            `action` varchar(255) DEFAULT NULL COMMENT '行为描述',
            `create_time` varchar(32) DEFAULT NULL COMMENT '创建时间',
            `creator` varchar(255) DEFAULT NULL COMMENT '创建者',
            `creator_id` varchar(32) DEFAULT NULL COMMENT '创建者id',
            `module` varchar(255) DEFAULT NULL COMMENT '模块',
            `flag` varchar(1) DEFAULT NULL COMMENT '标识',
            PRIMARY KEY (`id`)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8
    </insert>

    <insert id="saveLog">
        INSERT INTO `${tableName}`
        <trim prefix="(" suffix=")" suffixOverrides=",">
            <if test="log.id != null">id,</if>
            <if test="log.action != null">action,</if>
            <if test="log.createTime != null">create_time,</if>
            <if test="log.creator != null">creator,</if>
            <if test="log.creatorId != null">creator_id,</if>
            <if test="log.module != null">module,</if>
            <if test="log.flag != null">flag</if>
        </trim>
        <trim prefix="VALUES (" suffix=")" suffixOverrides=",">
            <if test="log.id != null">#{log.id},</if>
            <if test="log.action != null">#{log.action},</if>
            <if test="log.createTime != null">#{log.createTime},</if>
            <if test="log.creator != null">#{log.creator},</if>
            <if test="log.creatorId != null">#{log.creatorId},</if>
            <if test="log.module != null">#{log.module},</if>
            <if test="log.flag != null">#{log.flag}</if>
        </trim>
    </insert>

    <select id="queryTableByName" parameterType="String" resultType="Integer">
        select count(*) from information_schema.TABLES
        where table_schema = (select database()) and table_name = #{tableName}
    </select>

    <delete id="deleteByIdAndTableName">
        delete from `${tableName}` where id = #{id}
    </delete>

    <select id="queryByIdAndTableName" resultType="com.cpl.tsl.bean.Log">
        select * from `${tableName}` where id = #{id}
    </select>

    <select id="getAllLogTable" resultType="com.cpl.tsl.bean.Tables">
        select * from information_schema.TABLES
        where table_schema = (select database()) and table_name like 'log%'
    </select>

    <select id="getPage" resultType="com.cpl.tsl.bean.Log">
        select T.* from (
        <foreach collection="logList" item="item" separator=" UNION ALL ">
            SELECT * FROM `${item.tableName}`
        </foreach>
        ) T limit #{page},#{pageSize}
    </select>

    <select id="getLogCount" resultType="java.lang.Integer">
        select count(*) from (
        <foreach collection="logList" item="item" separator=" UNION ALL ">
            SELECT * FROM `${item.tableName}`
        </foreach>
        ) T
    </select>

</mapper>
6. Bean
package com.cpl.tsl.bean;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import java.io.Serializable;

/**
 * @author lll
 * @since 2023-04-10
 */
@ApiModel(value = "Log对象", description = "")
@Data
public class Log implements Serializable {

    private static final long serialVersionUID = 1L;

    @ApiModelProperty(value = "主键")
    private String id;

    @ApiModelProperty(value = "行为描述")
    private String action;

    @ApiModelProperty(value = "创建时间")
    private String createTime;

    @ApiModelProperty(value = "创建者")
    private String creator;

    @ApiModelProperty(value = "创建者id")
    private String creatorId;

    @ApiModelProperty(value = "模块")
    private String module;

    @ApiModelProperty(value = "标识")
    private String flag;

    @Override
    public String toString() {
        return "Log{" +
                "id=" + id +
                ", action=" + action +
                ", createTime=" + createTime +
                ", creator=" + creator +
                ", creatorId=" + creatorId +
                ", module=" + module +
                ", flag=" + flag +
                "}";
    }
}
IV. Notes
1. This demo shards by time; minute granularity is used only to make testing convenient, and real scenarios can shard by day or any other granularity.
2. In real business systems the shard key is not limited to time: hashing a field modulo a table count, using a timestamp, and so on all work; a minimal hash-routing sketch follows this list.
3. This demo creates shard tables directly from the time format. Alternatively, the base table can be kept unchanged while a scheduled job creates new tables and migrates data along some dimension; the basic idea is the same: store data split across tables by a chosen dimension and query them dynamically.
4. In real development, table names interpolated into SQL statements should always be wrapped in backticks (`), the MySQL identifier quote, to reduce the risk of SQL injection.
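As a hedged illustration of note 2, here is a minimal sketch of hash-based routing: a chosen field is hashed modulo a fixed number of tables. The table count, the log_0 … log_7 names, and the helper class are assumptions for the example and are not part of this project.

// Illustrative only: route a record to one of N fixed shard tables by hashing a key.
// The table count (8) and names (log_0 ... log_7) are assumptions for this sketch.
public class HashShardRouter {

    private static final int TABLE_COUNT = 8;

    // shardKey could be creatorId or any other business field chosen as the shard key.
    static String tableNameFor(String shardKey) {
        int bucket = Math.floorMod(shardKey.hashCode(), TABLE_COUNT);
        return "log_" + bucket;
    }

    public static void main(String[] args) {
        // The same key always maps to the same table, so reads and writes agree.
        System.out.println(tableNameFor("creator-42"));
        System.out.println(tableNameFor("creator-42"));
    }
}

Unlike the time-based scheme above, the set of tables is fixed up front, so a paged query does not have to discover tables from information_schema first.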
V. Source Code
Source code: https://github.com/CodingPandaLLL/tsl.git