MySQL批量Insert,HikariCP、线程池参数优化测试
参考
作者: leizhimin
源自: http://blog.51cto.com/lavasoft/185010
转载: https://blog.csdn.net/wwd0501/article/details/45056631
发布: 2009-07-29
环境
- MySQL 5.6.38
- CentOS 7
- JavaSE 1.8
- HikariCP数据库连接池
测试方案
执行10万次Insert语句,使用不同方式。
- C组:静态SQL,不自动提交,有事务控制(InnoDB引擎)
  1、逐条执行10万次
  2、分批执行:将10万条分成m批,每批n条,按多种分批方案执行。
- D组:预编译模式SQL,不自动提交,有事务控制(InnoDB引擎)
1、逐条执行10万次
2、分批执行将10万分成m批,每批n条,分多种分批方案来执行。
测试代码
HikariPool.java
package helloworld;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
/**
 * Thin static wrapper around a single shared HikariCP connection pool.
 *
 * <p>Call {@link #Start(int)} once before {@link #getConn()}; call
 * {@link #Close()} when the benchmark is finished.
 */
public class HikariPool {
    // Shared pool; null when Start() has not run or pool creation failed.
    private static HikariDataSource ds;

    /**
     * Creates the shared pool.
     *
     * @param PoolSize maximum pool size; values &lt;= 0 fall back to 10
     */
    public static void Start(int PoolSize) {
        HikariConfig config = new HikariConfig();
        config.setJdbcUrl("jdbc:mysql://192.168.1.1:3306/test");
        config.setUsername("xxxx");
        config.setPassword("xxxx");
        // Driver class is auto-detected from the JDBC URL.
        // config.setDriverClassName("com.mysql.jdbc.Driver");
        // MySQL driver statement-cache tuning.
        config.addDataSourceProperty("cachePrepStmts", "true");
        config.addDataSourceProperty("prepStmtCacheSize", "250");
        config.addDataSourceProperty("prepStmtCacheSqlLimit", "2048");
        // Lets Connector/J collapse addBatch() inserts into multi-row INSERTs.
        config.addDataSourceProperty("rewriteBatchedStatements", "true");
        config.setMaximumPoolSize(PoolSize <= 0 ? 10 : PoolSize);
        config.setValidationTimeout(3000);
        config.setConnectionTimeout(60000);
        try {
            ds = new HikariDataSource(config);
        } catch (Exception e) {
            // Don't swallow the failure silently: report it, then leave the
            // pool null so getConn() returns null.
            e.printStackTrace();
            ds = null;
        }
    }

    /**
     * @return a pooled connection, or null when the pool is unavailable
     * @throws SQLException if a connection cannot be acquired
     */
    public static Connection getConn() throws SQLException {
        return null == ds ? null : ds.getConnection();
    }

    /** @return the configured maximum pool size (NPE if Start() never succeeded). */
    public static int getMaxPoolSize() {
        return ds.getMaximumPoolSize();
    }

    /** Shuts the pool down; safe to call when Start() failed or never ran. */
    public static void Close() {
        if (null != ds) {
            ds.close();
        }
    }
}
TestBatch.java
参考: http://blog.51cto.com/lavasoft/185010 的实现,添加了线程池、HikariCP等
package helloworld;
import java.io.IOException;
import java.sql.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
/**
* JDBC批量Insert优化(下)
*
* @author leizhimin 2009-7-29 10:03:10
*/
/**
 * JDBC batch-insert benchmark driver (HikariCP + fixed thread pool).
 *
 * <p>Submits {@link ExecDuplicate} tasks to a thread pool and polls shared
 * atomic counters until every task has reported completion, then prints
 * throughput figures.
 *
 * @author leizhimin 2009-7-29 10:03:10
 */
public class TestBatch {
    /** Total number of records to insert per test run. */
    public static int recs_total = 300000;
    /** Number of submitted tasks that have finished (polled by the driver loops). */
    public static AtomicInteger tasks_done = new AtomicInteger(0);
    /** Accumulated milliseconds spent acquiring pool connections. */
    public static AtomicInteger conn_elapsed = new AtomicInteger(0);
    /** Gauge of connections currently checked out of the pool. */
    public static AtomicInteger conn_attched = new AtomicInteger(0);
    /** Worker pool executing the insert tasks; created in main(). */
    public static ExecutorService execService;

    /** 37 characters (26 letters + 10 digits + '_') — matches the intent of the original "% 37". */
    private static final String RANDOM_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789_";

    /**
     * Returns a random string of exactly {@code length} characters.
     *
     * <p>Bug fix: the original appended {@code Math.random() % 37}, but
     * {@code Math.random()} is always &lt; 1 so the modulo was a no-op and the
     * result was a decimal-digit string, not the intended random text.
     */
    public static String generateString(int length) {
        StringBuilder sb = new StringBuilder(Math.max(length, 0));
        for (int i = 0; i < length; i++) {
            sb.append(RANDOM_CHARS.charAt((int) (Math.random() * RANDOM_CHARS.length())));
        }
        return sb.toString();
    }

    /**
     * (Re)creates the {@code tuser} test table (InnoDB, no auto-increment so
     * the tasks control the ids).
     *
     * @throws SQLException on any database error
     */
    public static void init() throws SQLException {
        // try-with-resources: the original leaked the Statement always and the
        // Connection on any exception.
        try (Connection conn = HikariPool.getConn();
             Statement stmt = conn.createStatement()) {
            conn.setAutoCommit(false);
            stmt.addBatch("DROP TABLE IF EXISTS tuser");
            stmt.addBatch("CREATE TABLE tuser (\n" +
                    " id bigint(20) NOT NULL,\n" +
                    " name varchar(12) DEFAULT NULL,\n" +
                    " remark varchar(24) DEFAULT NULL,\n" +
                    " createtime datetime DEFAULT NULL,\n" +
                    " updatetime datetime DEFAULT NULL,\n" +
                    " PRIMARY KEY (id)\n" +
                    ") ENGINE=InnoDB DEFAULT CHARSET=utf8");
            stmt.executeBatch();
            conn.commit();
        }
    }

    /**
     * Resets the counters, submits {@code tasks} jobs and blocks until all of
     * them have called {@link #taskDone()}.
     *
     * @param what        task type understood by ExecDuplicate
     *                    ("c.insert", "c.batch", "d.insert", "d.batch")
     * @param tasks       number of tasks to submit
     * @param recsPerTask rows per task for batch modes; 0 for single-row tasks
     * @return elapsed wall-clock seconds
     */
    private static double runTasks(String what, int tasks, int recsPerTask) throws InterruptedException {
        conn_elapsed.set(0);
        tasks_done.set(0);
        conn_attched.set(0);
        long start = System.currentTimeMillis();
        for (int i = 0; i < tasks; i++) {
            // Batch tasks get a disjoint id range; single-row tasks insert id i+1.
            int startId = recsPerTask > 0 ? i * recsPerTask + 1 : i + 1;
            execService.execute(new ExecDuplicate(what, startId, recsPerTask));
        }
        while (tasks_done.get() < tasks) {
            Thread.sleep(50);
        }
        return (System.currentTimeMillis() - start) / 1000f;
    }

    /**
     * Single static-SQL inserts, one transaction each (group C).
     *
     * @throws Exception on any error
     */
    public static void testInsert() throws Exception {
        init();
        double elapsed = runTasks("c.insert", recs_total, 0);
        System.out.println("单条插入" + recs_total + "条记录,耗时:" + Math.round(elapsed*100)*0.01 + "秒, Avg:" + Math.round(recs_total/elapsed*100)*0.01 + " TPS! 连接耗时:" + conn_elapsed.get()/1000f);
    }

    /**
     * Batched static SQL (group C).
     *
     * @param m number of batches
     * @param n rows per batch
     * @throws Exception on any error
     */
    public static void testInsertBatch(int m, int n) throws Exception {
        init();
        double elapsed = runTasks("c.batch", m, n);
        System.out.println("批量插入" + m + "批 * " + n + "条/批 (共" + m * n + "条),耗时:"
                + Math.round(elapsed*100)*0.01 + "秒, Avg:" + Math.round(m * n / elapsed*100)*0.01 + " TPS! 连接耗时:" + conn_elapsed.get()/1000f);
    }

    /**
     * Single prepared-statement inserts, one transaction each (group D).
     *
     * @throws Exception on any error
     */
    public static void testInsert2() throws Exception {
        init();
        double elapsed = runTasks("d.insert", recs_total, 0);
        System.out.println("单条插入" + recs_total + "条记录,耗时:"
                + Math.round(elapsed*100)*0.01 + "秒, Avg:"
                + Math.round(recs_total/elapsed*100)*0.01 + " TPS! 连接耗时:" + conn_elapsed.get()/1000f);
    }

    /**
     * Batched prepared statements (group D).
     *
     * @param m number of batches
     * @param n rows per batch
     * @throws Exception on any error
     */
    public static void testInsertBatch2(int m, int n) throws Exception {
        init();
        double elapsed = runTasks("d.batch", m, n);
        System.out.println("批量插入" + m + "批 * " + n + "条/批 (共" + m * n + "条),耗时:"
                + Math.round(elapsed*100)*0.01 + "秒, Avg:" + Math.round(m * n / elapsed*100)*0.01 + " TPS! 连接耗时:" + conn_elapsed.get()/1000f);
    }

    public static void main(String[] args) throws Exception {
        int HikariPoolSize = 50;
        int ThreadPoolSize = HikariPoolSize;
        boolean single = false;
        recs_total = single ? 30000 : 300000;
        HikariPool.Start(HikariPoolSize);
        execService = Executors.newFixedThreadPool(ThreadPoolSize);
        init();
        long start = System.currentTimeMillis();
        System.out.println("--------C组测试,PoolSize:"+ ThreadPoolSize + "----------");
        // Rows-per-batch variants; 0 is the loop terminator.
        int batchRecords[] = {
                2000, 1500, 1000, 500, 200, 100, 50, 20, 0 };
        if (single) {
            testInsert();
        } else {
            for (int i = 0; batchRecords[i] > 0; i++) {
                testInsertBatch(recs_total / batchRecords[i], batchRecords[i]);
            }
        }
        long end1 = System.currentTimeMillis();
        System.out.println("C组测试过程结束,全部测试耗时:" + (end1 - start) / 1000f + "秒!");
        System.out.println("--------D组测试,PoolSize:"+ ThreadPoolSize + "----------");
        if (single) {
            testInsert2();
        } else {
            for (int i = 0; batchRecords[i] > 0; i++) {
                testInsertBatch2(recs_total / batchRecords[i], batchRecords[i]);
            }
        }
        long end2 = System.currentTimeMillis();
        System.out.println("D组测试过程结束,全部测试耗时:" + (end2 - end1) / 1000f + "秒!");
        execService.shutdown();
        HikariPool.Close();
    }

    /** Adds a connection-acquisition duration (ms) to the shared total. */
    public static void connElapsed(Long elapsed) {
        conn_elapsed.addAndGet(elapsed.intValue());
    }

    /** Marks one task finished; MUST be called exactly once per task. */
    public static void taskDone() {
        tasks_done.incrementAndGet();
    }

    /** Increments the checked-out-connection gauge. */
    public static void connAttached() {
        conn_attched.incrementAndGet();
    }

    /** Decrements the checked-out-connection gauge. */
    public static void connDettached() {
        conn_attched.decrementAndGet();
    }
}
ExecDuplicate.java
package helloworld;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
/**
 * One benchmark task: inserts rows into {@code test.tuser} using one of four
 * modes — static SQL or prepared statement, single-row or batched.
 *
 * <p>Every 5th id is deliberately mapped to the previous id so that the
 * {@code ON DUPLICATE KEY UPDATE} path is exercised.
 */
public class ExecDuplicate implements Runnable {
    /** Task type: "c.insert", "c.batch", "d.insert" or "d.batch". */
    public String what;
    /** First record id this task inserts. */
    public int id;
    /** Rows per batch for the batch modes; 0 for single-row tasks. */
    public int recs_per_batch;

    /** 37 characters (26 letters + 10 digits + '_') — matches the intent of the original "% 37". */
    private static final String RANDOM_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789_";

    /** Prepared-statement text shared by the "d.*" modes (byte-identical to the original). */
    private static final String PREPARED_SQL = "" + "insert into test.tuser\n" + " (id, name, remark, createtime, updatetime)\n" + "values\n"
            + " (?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE remark=name";

    public ExecDuplicate(String what, int id, int records_per_batch) {
        this.what = what;
        this.id = id;
        this.recs_per_batch = records_per_batch;
    }

    /**
     * Returns a random string of exactly {@code length} characters.
     *
     * <p>Bug fix: the original appended {@code Math.random() % 37}, but
     * {@code Math.random()} is always &lt; 1 so the modulo was a no-op and the
     * result was a decimal-digit string, not the intended random text.
     */
    public static String generateString(int length) {
        StringBuilder sb = new StringBuilder(Math.max(length, 0));
        for (int i = 0; i < length; i++) {
            sb.append(RANDOM_CHARS.charAt((int) (Math.random() * RANDOM_CHARS.length())));
        }
        return sb.toString();
    }

    @Override
    public void run() {
        try {
            switch (what) {
            case "c.insert":
                test_c_insert();
                break;
            case "c.batch":
                test_c_batch();
                break;
            case "d.insert":
                test_d_insert();
                break;
            case "d.batch":
                test_d_batch();
                break;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Builds the static INSERT for one record id (SQL text byte-identical to the original). */
    private static String staticInsertSql(int recId) {
        return "\n" + "insert into test.tuser \n" + "\t(id, name, \n" + "\tremark, \n" + "\tcreatetime, \n"
                + "\tupdatetime\n" + "\t)\n" + "\tvalues\n" + "\t(" + (recId%5==0 ? (recId-1) : recId) + ",'"+ generateString(12) + "', \n" + "\t'"
                + generateString(24) + "', \n" + "\tnow(), \n" + "\tnow()\n" + ") ON DUPLICATE KEY UPDATE remark=name";
    }

    /** Binds one row's parameters for the prepared-statement modes. */
    private static void bindRow(PreparedStatement pstmt, int recId) throws SQLException {
        pstmt.setInt(1, (recId%5==0 ? (recId-1) : recId));
        pstmt.setString(2, generateString(12));
        pstmt.setString(3, generateString(24));
        pstmt.setDate(4, new Date(System.currentTimeMillis()));
        pstmt.setDate(5, new Date(System.currentTimeMillis()));
    }

    /**
     * One static-SQL insert in its own transaction.
     *
     * <p>Bug fixes vs. the original: resources are closed via
     * try-with-resources even on failure, and {@code taskDone()} is called in
     * {@code finally} — previously an SQLException skipped it and the polling
     * loop in TestBatch hung forever.
     */
    public void test_c_insert() throws Exception {
        boolean attached = false;
        try {
            String sql = staticInsertSql(id);
            long connStart = System.currentTimeMillis();
            try (Connection conn = HikariPool.getConn()) {
                TestBatch.connAttached();
                attached = true;
                TestBatch.connElapsed(System.currentTimeMillis() - connStart);
                conn.setAutoCommit(false);
                try (Statement stmt = conn.createStatement()) {
                    stmt.execute(sql);
                }
                conn.commit();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            // Only decrement if we actually incremented, to keep the gauge balanced.
            if (attached) {
                TestBatch.connDettached();
            }
            TestBatch.taskDone();
        }
    }

    /**
     * One batch of {@code recs_per_batch} static-SQL inserts in a single
     * transaction. Resources are released and the task counted even on error.
     *
     * @throws Exception on unexpected (non-SQL) errors
     */
    public void test_c_batch() throws Exception {
        try {
            long connStart = System.currentTimeMillis();
            try (Connection conn = HikariPool.getConn()) {
                TestBatch.connElapsed(System.currentTimeMillis() - connStart);
                conn.setAutoCommit(false);
                try (Statement stmt = conn.createStatement()) {
                    for (int k = 0; k < recs_per_batch; k++, id++) {
                        stmt.addBatch(staticInsertSql(id));
                    }
                    stmt.executeBatch();
                }
                conn.commit();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            TestBatch.taskDone();
        }
    }

    /**
     * One prepared-statement insert in its own transaction; same resource and
     * accounting fixes as {@link #test_c_insert()}.
     */
    public void test_d_insert() throws Exception {
        boolean attached = false;
        try {
            long connStart = System.currentTimeMillis();
            try (Connection conn = HikariPool.getConn()) {
                TestBatch.connAttached();
                attached = true;
                TestBatch.connElapsed(System.currentTimeMillis() - connStart);
                conn.setAutoCommit(false);
                try (PreparedStatement pstmt = conn.prepareStatement(PREPARED_SQL)) {
                    bindRow(pstmt, id);
                    pstmt.executeUpdate();
                }
                conn.commit();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            if (attached) {
                TestBatch.connDettached();
            }
            TestBatch.taskDone();
        }
    }

    /**
     * One batch of {@code recs_per_batch} prepared-statement inserts in a
     * single transaction.
     *
     * @throws Exception on unexpected (non-SQL) errors
     */
    public void test_d_batch() throws Exception {
        try {
            long connStart = System.currentTimeMillis();
            try (Connection conn = HikariPool.getConn()) {
                TestBatch.connElapsed(System.currentTimeMillis() - connStart);
                conn.setAutoCommit(false);
                try (PreparedStatement pstmt = conn.prepareStatement(PREPARED_SQL)) {
                    for (int k = 0; k < recs_per_batch; k++, id++) {
                        bindRow(pstmt, id);
                        pstmt.addBatch();
                    }
                    pstmt.executeBatch();
                }
                conn.commit();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            TestBatch.taskDone();
        }
    }
}
测试结果
HikariCP连接池30个连接、30个工作线程
--------C组测试----------
单条插入50000条记录,耗时:87.34秒, 平均:572.45 TPS! 连接耗时:0.321
批量插入25批 * 2000条/批 (共50000条),耗时:10.04秒, 平均:4978.1 TPS! 连接耗时:0.0
批量插入50批 * 1000条/批 (共50000条),耗时:9.39秒, 平均:5325.38 TPS! 连接耗时:0.423
批量插入100批 * 500条/批 (共50000条),耗时:10.27秒, 平均:4869.02 TPS! 连接耗时:0.281
批量插入200批 * 250条/批 (共50000条),耗时:10.36秒, 平均:4827.19 TPS! 连接耗时:0.11
批量插入400批 * 125条/批 (共50000条),耗时:9.92秒, 平均:5041.85 TPS! 连接耗时:0.066
批量插入1000批 * 50条/批 (共50000条),耗时:10.83秒, 平均:4617.66 TPS! 连接耗时:0.072
批量插入2500批 * 20条/批 (共50000条),耗时:13.42秒, 平均:3726.34 TPS! 连接耗时:0.003
C组测试过程结束,全部测试耗时:163.24秒!
--------D组测试----------
单条插入50000条记录,耗时:76.19秒, 平均:656.23 TPS! 连接耗时:0.115
批量插入25批 * 2000条/批 (共50000条),耗时:10.1秒, 平均:4950.99 TPS! 连接耗时:0.179
批量插入50批 * 1000条/批 (共50000条),耗时:10.72秒, 平均:4664.18 TPS! 连接耗时:0.401
批量插入100批 * 500条/批 (共50000条),耗时:10.61秒, 平均:4710.76 TPS! 连接耗时:0.185
批量插入200批 * 250条/批 (共50000条),耗时:9.67秒, 平均:5171.17 TPS! 连接耗时:0.084
批量插入400批 * 125条/批 (共50000条),耗时:9.99秒, 平均:5003.0 TPS! 连接耗时:0.051
批量插入1000批 * 50条/批 (共50000条),耗时:10.96秒, 平均:4560.38 TPS! 连接耗时:0.022
批量插入2500批 * 20条/批 (共50000条),耗时:13.04秒, 平均:3833.77 TPS! 连接耗时:0.003
D组测试过程结束,全部测试耗时:153.123秒!
统计图表
C组
工作线程数 | 插入1条/次 | 插入2000条/次 | 1000条/次 | 500条/次 | 250条/次 | 125条/次 | 50条/次 | 20条/次 |
---|---|---|---|---|---|---|---|---|
1 | 32.65 | 530.25 | 520.4 | 508.83 | 494.8 | 476.85 | 413.04 | 302.41 |
5 | 112.83 | 1840.60 | 1726.52 | 1909.85 | 1824.82 | 1760.56 | 1602.05 | 1186.38 |
10 | 228.29 | 1930.13 | 2761.67 | 2811.36 | 2724.8 | 2541.94 | 2322.88 | 1668.89 |
20 | 461.77 | 4014.77 | 4889.5 | 4856.73 | 4892.37 | 4768.72 | 4252.06 | 3179.65 |
30 | 572.45 | 4978.1 | 5325.38 | 4869.02 | 4827.19 | 5041.85 | 4617.66 | 3726.34 |
40 | 636.77 | 4712.09 | 5039.81 | 5137.69 | 5251.55 | 5020.58 | 4384.43 | 4029.66 |
50 | 698.35 | 4940.71 | 5275.37 | 5195.35 | 5194.27 | 5014.54 | 4686.04 | 4015.42 |
60 | 761.51 | 5181.88 | 5030.69 | 5117.18 | 5184.03 | 4926.59 | 4920.78 | 4234.42 |
D组
工作线程数 | 插入1条/次 | 插入2000条/次 | 1000条/次 | 500条/次 | 250条/次 | 125条/次 | 50条/次 | 20条/次 |
---|---|---|---|---|---|---|---|---|
1 | 34.88 | 527.59 | 521.95 | 521.89 | 497.07 | 471.48 | 396.65 | 308.53 |
5 | 113.16 | 1906.94 | 1763.67 | 1799.86 | 1741.25 | 1621.8 | 1469.72 | 1083.42 |
10 | 224.51 | 1952.36 | 2761.67 | 2669.51 | 2670.94 | 2501.25 | 2164.97 | 1652.07 |
20 | 446.84 | 4017.68 | 4421.65 | 5135.58 | 4728.13 | 4723.22 | 4260.76 | 3122.27 |
30 | 656.23 | 4950.99 | 4664.18 | 4710.76 | 5171.17 | 5003.0 | 4560.38 | 3833.77 |
40 | 758.24 | 4859.09 | 4968.7 | 5034.74 | 5295.49 | 5150.39 | 4673.33 | 4153.51 |
50 | 799.72 | 4965.24 | 5167.42 | 5208.88 | 5212.13 | 5182.42 | 4811.39 | 4154.2 |
60 | 827.83 | 4996.5 | 4989.52 | 5131.89 | 5144.56 | 5105.17 | 4960.81 | 4189.01 |