Using ThreadPoolTaskExecutor for Multi-Threaded Bulk Data Inserts

Purpose

Import large volumes of data into a MySQL database.

1. Create the thread pool

import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

import java.util.concurrent.Executor;
import java.util.concurrent.ThreadPoolExecutor;

/**
 * @author yt
 * @create 2023/2/15 14:26
 */
@Configuration
@EnableAsync
@Slf4j
public class ExecutorConfig {

    @Value("${executor.thread.core_pool_size}")
    private int corePoolSize;
    @Value("${executor.thread.max_pool_size}")
    private int maxPoolSize;
    @Value("${executor.thread.queue_capacity}")
    private int queueCapacity;
    @Value("${executor.thread.prefix}")
    private String namePrefix;

    @Bean(name = "asyncServiceExecutor")
    public Executor asyncServiceExecutor() {
        log.warn("startasyncServiceExecutor");
        //在这里修改
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        //配置核心线程数
        executor.setCorePoolSize(corePoolSize);
        //配置最大线程数
        executor.setMaxPoolSize(maxPoolSize);
        //配置队列大小
        executor.setQueueCapacity(queueCapacity);
        //配置线程池中的线程的名称前缀
        executor.setThreadNamePrefix(namePrefix);
        // rejection-policy:当pool已经达到max size的时候,如何处理新任务
        // CALLER_RUNS:不在新线程中执行任务,而是有调用者所在的线程来执行
        executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
        //执行初始化
        executor.initialize();
        return executor;
    }
}

2. Thread pool parameters in application.yml

executor:
  thread:
    # core pool size
    core_pool_size: 5
    # maximum pool size
    max_pool_size: 10
    # queue capacity
    queue_capacity: 100
    # name prefix for the pool's threads
    prefix: mx

3. Thread pool created; next, the MyBatis-Plus batch-insert injector

MyBatis-Plus already ships a batch save method, saveBatch(). Batch processing there still boils down to one SQL statement per row: the statement is prepared only once (a prepared-statement optimization), but a for loop executes it row by row, so throughput is still not great for large volumes. This article therefore uses a MyBatis-Plus SQL injector instead, which generates a true multi-row insert.
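To make the difference concrete, here is a rough sketch of the two call styles. This is a hypothetical helper for illustration only: fileService stands for a MyBatis-Plus IService<FileDO>, FileMapper is the mapper assembled in steps 5 and 6, and the table and column names in the comments are placeholders.

import com.baomidou.mybatisplus.extension.service.IService;
import com.kaying.luck.mapper.file.FileMapper;
import com.kaying.luck.pojo.file.dos.FileDO;

import java.util.List;

public class BatchInsertComparison {

    // saveBatch: "INSERT INTO file (...) VALUES (?, ...)" is prepared once, then executed
    // row by row through a JDBC statement batch, flushed in chunks.
    static void viaSaveBatch(IService<FileDO> fileService, List<FileDO> fileDOList) {
        fileService.saveBatch(fileDOList);
    }

    // insertBatchSomeColumn: one multi-row "INSERT ... VALUES (...), (...), ..." built by a
    // MyBatis <foreach>, so the whole list reaches MySQL in a single statement.
    static void viaInjector(FileMapper fileMapper, List<FileDO> fileDOList) {
        fileMapper.insertBatchSomeColumn(fileDOList);
    }
}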

import com.baomidou.mybatisplus.annotation.FieldFill;
import com.baomidou.mybatisplus.core.injector.AbstractMethod;
import com.baomidou.mybatisplus.core.injector.DefaultSqlInjector;
import com.baomidou.mybatisplus.core.metadata.TableInfo;
import com.baomidou.mybatisplus.extension.injector.methods.InsertBatchSomeColumn;

import java.util.List;


/**
 * The built-in saveBatch inserts in a for loop; this injector adds a <foreach>-based multi-row insert.
 * @author 于涛
 */
public class EasySqlInjector extends DefaultSqlInjector {

    @Override
    public List<AbstractMethod> getMethodList(Class<?> mapperClass, TableInfo tableInfo) {
        // Note: this injector extends DefaultSqlInjector and calls its getMethodList(),
        // so all of MyBatis-Plus's built-in methods are preserved.
        List<AbstractMethod> methodList = super.getMethodList(mapperClass, tableInfo);
        // Add insertBatchSomeColumn, skipping fields whose fill strategy is UPDATE
        methodList.add(new InsertBatchSomeColumn(i -> i.getFieldFill() != FieldFill.UPDATE));
        return methodList;
    }
}

4. Register the MyBatis-Plus injector as a bean

import com.baomidou.mybatisplus.annotation.DbType;
import com.baomidou.mybatisplus.extension.plugins.MybatisPlusInterceptor;
import com.baomidou.mybatisplus.extension.plugins.inner.OptimisticLockerInnerInterceptor;
import com.baomidou.mybatisplus.extension.plugins.inner.PaginationInnerInterceptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * @author admin
 */
@Configuration
public class MybatisPlusConfig {

    @Bean
    public MybatisPlusInterceptor mybatisPlusInterceptor(){
        MybatisPlusInterceptor interceptor = new MybatisPlusInterceptor();
        // pagination plugin
        interceptor.addInnerInterceptor(new PaginationInnerInterceptor(DbType.MYSQL));
        // optimistic locking plugin
        interceptor.addInnerInterceptor(new OptimisticLockerInnerInterceptor());
        return interceptor;
    }

    @Bean
    public EasySqlInjector easySqlInjector () {
        return new EasySqlInjector();
    }
}

5. Declare the batch-insert method; any mapper that extends EasyBaseMapper can use it

import com.baomidou.mybatisplus.core.mapper.BaseMapper;

import java.util.Collection;

public interface EasyBaseMapper<T> extends BaseMapper<T> {
    /**
     * Batch insert (MySQL only)
     *
     * @param entityList entity list
     * @return number of rows affected
     */
    Integer insertBatchSomeColumn(Collection<T> entityList);
}
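The FileMapper injected in step 6 only needs to extend this base mapper. A minimal sketch is below; the @Mapper registration (or an equivalent @MapperScan on the application class) is assumed to be configured elsewhere in the project.

import com.kaying.luck.pojo.file.dos.FileDO;
import org.apache.ibatis.annotations.Mapper;

// Concrete mapper used by AsyncServiceImpl below; it inherits insertBatchSomeColumn
// from EasyBaseMapper and the standard CRUD methods from BaseMapper.
@Mapper
public interface FileMapper extends EasyBaseMapper<FileDO> {
}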

6. Create the async interface and its implementation

import com.kaying.luck.pojo.file.dos.FileDO;

import java.util.List;

/**
 * @author yt
 * @create 2023/2/15 15:04
 */
public interface AsyncService {
    /**
     * Batch-insert file records
     * @param fileDOList list of file entities
     */
    void fileBatchInsert(List<FileDO> fileDOList);
}

The @Async("asyncServiceExecutor") annotation submits each call to the thread pool instead of running it on the caller's thread. Tasks first occupy the core threads; once those are busy, new tasks wait in the queue, and only when the queue is full does the pool grow toward the maximum thread count. With the CallerRunsPolicy configured above, anything beyond that is executed on the calling thread.

import com.kaying.luck.mapper.file.FileMapper;
import com.kaying.luck.pojo.file.dos.FileDO;
import com.kaying.luck.service.async.AsyncService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.util.List;

/**
 * @author yt
 * @create 2023/2/15 15:05
 */
@Service
public class AsyncServiceImpl implements AsyncService {

    @Autowired
    private FileMapper fileMapper;

    @Async("asyncServiceExecutor")
    @Override
    public void fileBatchInsert(List<FileDO> fileDOList) {
        fileMapper.insertBatchSomeColumn(fileDOList);
    }

}
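One caveat: a void @Async method is fire-and-forget, so the caller in step 7 returns before the inserts are actually committed. If the caller needs to wait for all batches, a minimal sketch is to return a CompletableFuture instead; the class and method below are hypothetical and not part of the original project.

import com.kaying.luck.mapper.file.FileMapper;
import com.kaying.luck.pojo.file.dos.FileDO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.util.List;
import java.util.concurrent.CompletableFuture;

@Service
public class AsyncFutureServiceImpl {

    @Autowired
    private FileMapper fileMapper;

    // Runs on the same asyncServiceExecutor pool, but the returned CompletableFuture
    // lets the caller join on every submitted batch before reporting success.
    @Async("asyncServiceExecutor")
    public CompletableFuture<Integer> fileBatchInsert(List<FileDO> fileDOList) {
        return CompletableFuture.completedFuture(fileMapper.insertBatchSomeColumn(fileDOList));
    }
}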

7. Simulate inserting 2,000 records: lists of up to 1,000 records are handled as a single task, larger lists are split and inserted across multiple threads

import com.kaying.luck.pojo.file.dos.FileDO;
import com.kaying.luck.service.async.AsyncService;
import com.kaying.luck.service.file.FileTaskService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.List;

/**
 * @author yt
 * @create 2023/2/13 13:37
 */
@Service
public class FileTaskServiceImpl implements FileTaskService {

    private static final Logger logger = LoggerFactory.getLogger(FileTaskService.class);

    // number of records handled per thread
    private static final int deviceCount = 1000;

    @Autowired
    private AsyncService asyncService;

    @Override
    public void test() {

        List<FileDO> fileDOList = new ArrayList<>();

        for (int i = 0; i < 2000; i++) {
            FileDO fileDO = new FileDO();
            fileDO.setActivityNo("DD "+i);
            fileDOList.add(fileDO);
        }

        /* dispatch in batches */
        try {
            if (fileDOList.size() <= deviceCount) {
                /* async insert */
                asyncService.fileBatchInsert(fileDOList);
            } else {
                List<List<FileDO>> li = createList(fileDOList, deviceCount);
                for (List<FileDO> liop : li) {
                    /* async insert */
                    asyncService.fileBatchInsert(liop);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
            logger.error(e.toString() + " at line: " + e.getStackTrace()[0].getLineNumber());
        }
    }

    /**
     * Split a list into sub-lists
     *
     * @param targe the source list
     * @param size  how many records go into each sub-list
     * @return the list of sub-lists
     */
    public List<List<FileDO>> createList(List<FileDO> targe, int size) {
        List<List<FileDO>> listArr = new ArrayList<List<FileDO>>();
        // number of sub-lists after the split
        int arrSize = targe.size() % size == 0 ? targe.size() / size : targe.size() / size + 1;
        for (int i = 0; i < arrSize; i++) {
            List<FileDO> sub = new ArrayList<FileDO>();
            // copy the records in this index range into the sub-list
            for (int j = i * size; j <= size * (i + 1) - 1; j++) {
                if (j <= targe.size() - 1) {
                    sub.add(targe.get(j));
                }
            }
            listArr.add(sub);
        }
        return listArr;
    }
}
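As a usage note: with the 2,000 simulated records above and deviceCount = 1000, createList returns two sub-lists of 1,000 records each, so fileBatchInsert is submitted twice and two pool threads each send one multi-row INSERT.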

Reposted from blog.csdn.net/m0_58709145/article/details/129045339