100万数据插入 mysql 性能测试

Mysql服务器信息

8核16G

 

开发台式机信息

固态硬盘、公司内部网络

 

示例代码

1.采用 jdk1.8 + springboot 2.1.6 + mybatis 框架组合而成的工程示例

2.mysql5.7 全部过程采用默认安装,没做特殊优化

3.测试表,只有主键,ID不自增

mysql.sql

-- Test table: id is an explicit (non AUTO_INCREMENT) bigint primary key supplied
-- by the application; content is a small payload column. No secondary indexes,
-- so inserts only maintain the primary key.
CREATE TABLE `t_result` (
  `id` bigint(8) NOT NULL,
  `content` varchar(50) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4

pom.xml

<!-- Spring Boot web starter: embedded Tomcat + Spring MVC -->
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
</dependency>
<!-- Spring Data JPA starter (provides the javax.persistence annotations used by Result) -->
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<!-- Thymeleaf template engine (not exercised by the benchmark endpoints) -->
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-thymeleaf</artifactId>
</dependency>
<!-- MyBatis Spring Boot integration: auto-registers @Mapper interfaces -->
<dependency>
    <groupId>org.mybatis.spring.boot</groupId>
    <artifactId>mybatis-spring-boot-starter</artifactId>
    <version>2.1.0</version>
</dependency>
<!-- MySQL JDBC driver. NOTE(review): 5.1.20 is quite old; newer 5.1.x releases
     contain utf8mb4/SSL fixes relevant to MySQL 5.7 — consider upgrading. -->
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>5.1.20</version>
</dependency>
<!-- Alibaba Druid JDBC connection pool (selected via spring.datasource.type) -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>druid</artifactId>
    <version>1.0.18</version>
</dependency>

mybatis.cfg.xml

<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE configuration PUBLIC "-//mybatis.org//DTD Config 3.0//EN" "http://mybatis.org/dtd/mybatis-3-config.dtd">
<!-- Global MyBatis configuration file (referenced by mybatis.config-location).
     Intentionally empty: all defaults are used; mapper XML files are located via
     mybatis.mapper-locations in application.properties. -->
<configuration>
</configuration>

ResultMapper.xml

<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">

<mapper namespace="com.pro.app.result.dao.ResultMapper">
    <!-- Multi-row insert: renders a single INSERT with one (id, content) tuple per
         list element. Caller must pass a non-empty list, otherwise the rendered SQL
         ends at "values" and is invalid. Keep batch sizes bounded so the statement
         stays under MySQL's max_allowed_packet. -->
    <insert id="insertResultBatch" parameterType="java.util.List">
        insert into t_result (id, content) values
        <foreach collection="list" item="item" index="index" separator=",">
            (#{item.id}, #{item.content})
        </foreach>
    </insert>
    <!-- Single-row insert. Fix: removed useGeneratedKeys/keyProperty — t_result.id
         is NOT AUTO_INCREMENT and the statement inserts an explicit #{id}, so there
         is no database-generated key to read back into the entity. Also normalized
         the MySQL-only "value" keyword to standard "values". -->
    <insert id="insert" parameterType="com.pro.app.entity.Result">
        insert into t_result(id,content) values(#{id},#{content})
    </insert>
</mapper>

application.properties

# Embedded Tomcat address/port
server.port=8089
server.address=127.0.0.1
# Fix: "server.sessionTimeout" is not a Spring Boot property and was silently
# ignored. Spring Boot 2.x uses server.servlet.session.timeout (with a unit).
server.servlet.session.timeout=30m

spring.application.name=demo-web

# Servlet context path (Spring Boot 2.0+ key)
server.servlet.context-path=/api

# Log output is configured by logback.xml on the classpath
logging.config=classpath:logback.xml

# Log level for the springframework.web package (distinct from application loggers).
# Fix: use "=" instead of ":" as the separator, consistent with the rest of the file.
logging.level.org.springframework.web=INFO
# Enable Spring Boot debug mode (verbose startup/condition report) when true
debug=false

# Datasource configuration (primary/default)
spring.datasource.type=com.alibaba.druid.pool.DruidDataSource
spring.datasource.driver-class-name=com.mysql.jdbc.Driver
# NOTE(review): MySQL 5.7 with Connector/J 5.1.x logs SSL warnings unless
# useSSL=false is appended — confirm against the deployment environment.
spring.datasource.url=jdbc:mysql://192.168.110.01:3306/demo?useUnicode=true&characterEncoding=utf-8
spring.datasource.username=root
spring.datasource.password=123465

# Mapper XML locations
mybatis.mapper-locations=classpath:mapper/*.xml
# Global MyBatis configuration file
mybatis.config-location=classpath:mybatis.cfg.xml

Result.java

/**
 * Entity mapped to the t_result table.
 * Lombok @Data generates the getters/setters (setId/setContent) used by ResultApi.
 */
@Data
public class Result {
    // Primary key; assigned by the caller (t_result.id is not AUTO_INCREMENT).
    @Id
    private long id;
    // Payload column, varchar(50) in the schema.
    @Column(name = "content")
    private String content;
}

ResultMapper.java

/**
 * MyBatis mapper for t_result; SQL lives in mapper/ResultMapper.xml.
 * NOTE(review): @Component is redundant here — mybatis-spring-boot-starter already
 * registers @Mapper interfaces as beans; kept in case the "ResultMapper" bean name
 * is referenced elsewhere.
 */
@Mapper
@Component(value = "ResultMapper")
public interface ResultMapper {
    /**
     * Inserts all rows in one multi-row INSERT statement.
     * @param resultList rows to insert; must be non-empty or the rendered SQL is invalid
     * @return number of rows inserted
     */
    int insertResultBatch(List<Result> resultList);
    /**
     * Inserts a single row.
     * @param result row to insert; its id must be set by the caller
     * @return number of rows inserted (1 on success)
     */
    int insert(Result result);
}

ResultService.java

/**
 * Thin service layer over ResultMapper for single and batched inserts.
 */
@Service
public class ResultService {

    private final ResultMapper resultMapper;

    // Constructor injection instead of field injection: the dependency is
    // explicit, final, and the class is testable without a Spring context.
    @Autowired
    public ResultService(ResultMapper resultMapper) {
        this.resultMapper = resultMapper;
    }

    /**
     * Inserts a single row.
     * @param result row to insert; id must already be set
     */
    public void insert(Result result){
        resultMapper.insert(result);
    }

    /**
     * Inserts all rows in one multi-row INSERT.
     * Fix: guard against null/empty input — the mapper's foreach would otherwise
     * render "insert into t_result (id, content) values" with no tuples, which is
     * invalid SQL and throws at the database.
     * @param resultList rows to insert; no-op when null or empty
     */
    public void insertResultBatch(List<Result> resultList){
        if (resultList == null || resultList.isEmpty()) {
            return;
        }
        resultMapper.insertResultBatch(resultList);
    }
}

ResultApi.java

/**
 * Benchmark endpoints: insert 1,000,000 rows one-by-one (/batAdd) or in
 * 1000-row batches (/batAddList) and return the elapsed wall time in ms.
 */
@RestController
@RequestMapping("/result")
public class ResultApi {

    private static final int BATCH_SIZE = 1000;

    @Autowired
    private ResultService resultService;

    /**
     * Inserts 1,000,000 rows with one INSERT statement each (ids 1..1,000,000).
     * @return ApiResult whose data field holds the elapsed time in milliseconds
     */
    @RequestMapping(value = "/batAdd", method = {RequestMethod.GET, RequestMethod.POST})
    @ResponseBody
    public Object batAdd(){
        long startTime = System.currentTimeMillis();
        // All rows share the same content so only the id varies per row.
        String content = String.valueOf(startTime);
        for (int i = 1; i <= 1000000; i++){
            Result result = new Result();
            result.setId(i);
            result.setContent(content);
            resultService.insert(result);
        }
        ApiResult response = new ApiResult();
        response.setData(System.currentTimeMillis() - startTime);
        return response;
    }

    /**
     * Inserts 1,000,000 rows (ids 1,011,001..2,011,000 — a range disjoint from
     * /batAdd) using multi-row INSERTs of BATCH_SIZE rows each.
     * @return ApiResult whose data field holds the elapsed time in milliseconds
     */
    @RequestMapping(value = "/batAddList", method = {RequestMethod.GET, RequestMethod.POST})
    @ResponseBody
    public Object batAddList(){
        long startTime = System.currentTimeMillis();
        String content = String.valueOf(startTime);
        List<Result> batch = new ArrayList<>(BATCH_SIZE);
        for (int i = 1011001; i <= 2011000; i++){
            Result result = new Result();
            result.setId(i);
            result.setContent(content);
            batch.add(result);
            if (batch.size() == BATCH_SIZE){
                resultService.insertResultBatch(batch);
                batch.clear();
            }
        }
        // Fix: flush the trailing partial batch. The original only worked because
        // the row count happened to be an exact multiple of the batch size; any
        // other count would silently drop the remainder.
        if (!batch.isEmpty()){
            resultService.insertResultBatch(batch);
        }
        ApiResult response = new ApiResult();
        response.setData(System.currentTimeMillis() - startTime);
        return response;
    }
}

注:ApiResult 是内部封装的向前端响应的实体对象,主要有三个变量:code,msg,data

AppStart.java

/**
 * Spring Boot application entry point.
 * Fix: removed the redundant @EnableAutoConfiguration — @SpringBootApplication
 * is itself meta-annotated with it, so declaring both adds nothing.
 */
@SpringBootApplication
public class AppStart {
    public static void main(String[] args) {
        SpringApplication.run(AppStart.class, args);
    }
}

 

测试结果

-- 单条插入1万
{
    "code": "1",
    "data": "27323",
    "msg": ""
}
-- 单条插入100万
{
    "code": "1",
    "data": "2868335",
    "msg": ""
}

-- 批量插入1000
{
    "code": "1",
    "data": "160",
    "msg": ""
}
-- 批量插入1万
{
    "code": "1",
    "data": "355",
    "msg": ""
}
-- 批量插入100万
{
    "code": "1",
    "data": "31673",
    "msg": ""
}

 

数据统计

累计入库总记录数

测试库总大小

 

总结

    100万条记录一条一条地插入到数据表中,需要耗时约 47 分钟 =(2868335 / 1000 / 60),说实在的比预期要慢很多;

    100万条记录每1000条为一批的方式插入到数据表中,需要耗时约 31 秒 =(31673 / 1000),速度就挺快了,结果比较满意;

    所以涉及到大批量数据插入数据库表时,应采用批量插入方式。操作前要注意数据表的索引是否过多,或先取消除主键外的其它索引(每新增一条数据都需要维护索引,会加大数据库插入的开销),待100万以上批量数据全部入库后再重建索引;同时还需要注意 mysql 配置中 max_allowed_packet 的值大小,以防单条SQL语句本身超过指定的大小,导致数据库异常;

相关文章
相关标签/搜索