Spring Boot: database read/write separation with YAML configuration + AOP

1. Configure application-dev.yml

spring:
    aop:
        proxy-target-class: true
    datasource:
        readSize: 2  # number of read (slave) data sources; must match the list built in step 2
        type: com.alibaba.druid.pool.DruidDataSource
        driverClassName: com.mysql.jdbc.Driver
        druid:
            first:  # data source 1 (write)
                url: jdbc:mysql://ip/bfec_erp?allowMultiQueries=true&useUnicode=true&characterEncoding=UTF-8
                username: 
                password: 
#            password : ENC(CMTWfWzC/iXWiW/PeFPJ9dYlpBHwOkxc)
            db_master:  # data source 2 (read)
                url: jdbc:mysql://127.0.0.1:3306/db_master?charSet=utf-8
                username: root
                password: root
            db_other:  # data source 3 (read)
                url: jdbc:mysql://127.0.0.1:3306/db_other?charSet=utf-8
                username: root
                password: root

            initial-size: 10
            max-active: 100
            min-idle: 10
            max-wait: 60000
            pool-prepared-statements: true
            max-pool-prepared-statement-per-connection-size: 20
            time-between-eviction-runs-millis: 60000
            min-evictable-idle-time-millis: 300000
            validation-query: SELECT 1 FROM DUAL
            test-while-idle: true
            test-on-borrow: false
            test-on-return: false
            stat-view-servlet:
                enabled: true
                url-pattern: /druid/*
                #login-username: admin
                #login-password: admin
            filter:
                stat:
                    log-slow-sql: true
                    slow-sql-millis: 3000
                    merge-sql: true
                wall:
                    config:
                        multi-statement-allow: true
local:
    warehouseInterface: http://localhost:0000/admin/local/warehouse/names/
 

2. Create the data source configuration class DataSourceConfiguration

package com.bfecerp.common.datasource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

/**
 * @Description
 * @Author zk
 * @createTime 2018/6/19 13:52
 */
@Configuration
public class DataSourceConfiguration {
    private static Logger log = LoggerFactory.getLogger(DataSourceConfiguration.class);
    @Value("${spring.datasource.type}")
    private Class<? extends DataSource> dataSourceType;

    @Bean(name="writeDataSource", destroyMethod = "close", initMethod="init")
    @Primary
    @ConfigurationProperties(prefix = "spring.datasource.druid.first")
    public DataSource writeDataSource() {
        log.info("-------------------- writeDataSource init ---------------------");
        return DataSourceBuilder.create().type(dataSourceType).build();
    }
    /**
     * Configure one bean per read (slave) database.
     * @return
     */
    @Bean(name = "readDataSource", destroyMethod = "close", initMethod="init")
    @ConfigurationProperties(prefix = "spring.datasource.druid.db_master")
    public DataSource masterDataSource(){
        log.info("-------------------- masterDataSource init ---------------------");
        return DataSourceBuilder.create().type(dataSourceType).build();
    }
    @Bean(name = "readDataSource", destroyMethod = "close", initMethod="init")
    @ConfigurationProperties(prefix = "spring.datasource.druid.db_other")
    public DataSource otherDataSource(){
        log.info("-------------------- otherDataSource init ---------------------");
        return DataSourceBuilder.create().type(dataSourceType).build();
    }
    /**
     * The read data sources are collected into a list so that simple load
     * balancing can be applied when there is more than one slave.
     * @return
     * @throws SQLException
     */
    @Bean("readDataSources")
    public List<DataSource> readDataSources() throws SQLException{
        List<DataSource> dataSources=new ArrayList<>();
        dataSources.add(masterDataSource());
        dataSources.add(otherDataSource());
        return dataSources;
    }
}

3. Create the DataSourceContextHolder class, which uses a ThreadLocal to switch the data source for the current thread

package com.bfecerp.common.datasource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @Description
 * @Author zk
 * @createTime 2018/6/19 13:53
 */
public class DataSourceContextHolder {
    private static Logger log = LoggerFactory.getLogger(DataSourceContextHolder.class);
    private static final ThreadLocal<String> local = new ThreadLocal<String>();
    public static ThreadLocal<String> getLocal() {
        return local;
    }
    /**
     * Reads may go to any of several slave databases.
     */
    public static void read() {
        local.set(DataSourceType.read.getType());
        System.out.println("==:" + DataSourceType.read.getType());
        log.info("switched to the read data source...");
    }
    /**
     * Writes always go to the single master database.
     */
    public static void write() {
        local.set(DataSourceType.write.getType());
        log.info("switched to the write data source...");
    }
    public static String getJdbcType() {
        return local.get();
    }
}
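One detail the class above does not cover: the ThreadLocal value is never removed, so a pooled worker thread can carry the previous routing key into its next request. A small optional helper, not part of the original post, could be added to DataSourceContextHolder for that purpose:

    /**
     * Hypothetical addition (not in the original post): drop the routing key once
     * the current request is finished, so reused worker threads start clean.
     */
    public static void clear() {
        local.remove();
    }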

4. Create the DataSourceType enum

package com.bfecerp.common.datasource;

/**
 * @Description
 * @Author zk
 * @createTime 2018/6/19 13:54
 */
public enum DataSourceType {
    read("read", "从库"),
    write("write", "主库");
    private String type;
    private String name;
    DataSourceType(String type, String name) {
        this.type = type;
        this.name = name;
    }
    public String getType() {
        return type;
    }
    public void setType(String type) {
        this.type = type;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
}

5. Create the MybatisConfiguration class

package com.bfecerp.common.datasource;

import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @Description
 * @Author zk
 * @createTime 2018/6/19 13:54
 */
@Configuration
@ConditionalOnClass({EnableTransactionManagement.class})
@Import({DataSourceConfiguration.class})
@MapperScan(basePackages = {"com.bfecerp.modules.sys.dao"})
public class MybatisConfiguration {
    @Value("${spring.datasource.type}")
    private Class<? extends DataSource> dataSourceType;
    @Value("${spring.datasource.readSize}")
    private String dataSourceSize;

//    @Resource(name = "writeDataSource")
//    private DataSource writeDataSource;
//    @Qualifier("readDataSource")
//    private DataSource readDataSource;
    @Bean
    @ConditionalOnMissingBean
    public SqlSessionFactory sqlSessionFactory(ApplicationContext ac) throws Exception {
        SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
        sqlSessionFactoryBean.setDataSource(roundRobinDataSouceProxy(ac));
        ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        sqlSessionFactoryBean.setMapperLocations(resolver.getResources("classpath:mapper/import/*.xml"));
        sqlSessionFactoryBean.setTypeAliasesPackage("com.bfecerp.modules.sys.entity");
        sqlSessionFactoryBean.getObject().getConfiguration().setMapUnderscoreToCamelCase(true);
        return sqlSessionFactoryBean.getObject();
    }

    /**
     * Builds the routing proxy: every read data source configured above is
     * registered here under an integer key.
     *
     * @return
     */
    @Bean
    public AbstractRoutingDataSource roundRobinDataSouceProxy(ApplicationContext ac) {
        int size = Integer.parseInt(dataSourceSize);
        System.out.println("size:" + size);
        MyAbstractRoutingDataSource proxy = new MyAbstractRoutingDataSource(size);
        Map<Object, Object> targetDataSources = new HashMap<Object, Object>();
        //register each read data source under an integer key (the round-robin index)
        DataSource writeDataSource = (DataSource) ac.getBean("writeDataSource");
        List<DataSource> readDataSources = (List<DataSource>) ac.getBean("readDataSources");
        for (int i = 0; i < size; i++) {
            targetDataSources.put(i, readDataSources.get(i));
        }
        proxy.setDefaultTargetDataSource(writeDataSource);
        proxy.setTargetDataSources(targetDataSources);
        return proxy;
    }
}

The beans registered in step 2 are put into a map here, so that the matching data source can later be looked up from this map at runtime.

Note: in the author's tests, neither @Resource nor @Qualifier was able to obtain the beans registered in step 2; they could only be fetched through the ApplicationContext. This is presumably an ordering issue: the @Resource/@Qualifier injection ran before the step-2 beans had been registered, so nothing was found.
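For comparison only, and not verified against the author's setup: a common alternative is to declare the required beans as parameters of the @Bean method and let the container resolve them by name, which usually avoids this ordering problem. A minimal sketch, assuming the read beans are named masterDataSource and otherDataSource as in step 2 (requires org.springframework.beans.factory.annotation.Qualifier and java.util.Arrays):

    // Hypothetical alternative to the ApplicationContext lookup above.
    @Bean
    public AbstractRoutingDataSource roundRobinDataSouceProxy(
            @Qualifier("writeDataSource") DataSource writeDataSource,
            @Qualifier("masterDataSource") DataSource masterDataSource,
            @Qualifier("otherDataSource") DataSource otherDataSource) {
        List<DataSource> readDataSources = Arrays.asList(masterDataSource, otherDataSource);
        MyAbstractRoutingDataSource proxy = new MyAbstractRoutingDataSource(readDataSources.size());
        Map<Object, Object> targetDataSources = new HashMap<>();
        for (int i = 0; i < readDataSources.size(); i++) {
            targetDataSources.put(i, readDataSources.get(i));    // same integer keys as in the original version
        }
        proxy.setDefaultTargetDataSource(writeDataSource);        // unmatched keys fall back to the write source
        proxy.setTargetDataSources(targetDataSources);
        return proxy;
    }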

6. Create MyAbstractRoutingDataSource

package com.bfecerp.common.datasource;

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

import java.util.concurrent.atomic.AtomicInteger;

/**
 * @Description
 * @Author zk
 * @createTime 2018/6/19 13:58
 */
public class MyAbstractRoutingDataSource extends AbstractRoutingDataSource {
    private final int dataSourceNumber;
    private AtomicInteger count = new AtomicInteger(0);
    public MyAbstractRoutingDataSource(int dataSourceNumber) {
        this.dataSourceNumber = dataSourceNumber;
    }
    @Override
    protected Object determineCurrentLookupKey() {
        String typeKey = DataSourceContextHolder.getJdbcType();
        if (typeKey == null || DataSourceType.write.getType().equals(typeKey)) {
            System.out.println("routing key == " + typeKey);
            return DataSourceType.write.getType();
        }
        System.out.println("routing key == " + typeKey);
        // read: simple round-robin load balancing across the slaves
        int number = count.getAndAdd(1);
        int lookupKey = number % dataSourceNumber;
        return lookupKey;
    }
}
 

The determineCurrentLookupKey() method returns the lookup key of the target data source based on the value that DataSourceContextHolder has set for the current thread. For reads the key is an integer between 0 and readSize-1, so it must match the keys put into the map in step 5. For writes the method returns the string "write", which matches no map entry, so AbstractRoutingDataSource falls back to the default target data source, i.e. the write data source.
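A quick standalone illustration of how the read keys cycle (not from the original post, uses java.util.concurrent.atomic.AtomicInteger), assuming readSize is 2:

    // With two read data sources the counter produces the keys 0, 1, 0, 1, ...
    AtomicInteger count = new AtomicInteger(0);
    for (int i = 0; i < 4; i++) {
        System.out.println(count.getAndAdd(1) % 2);   // prints 0 1 0 1
    }

One caveat for completeness: getAndAdd eventually overflows to a negative value, at which point the modulo result is negative, matches no map key and falls back to the write source; Math.floorMod(number, dataSourceNumber) would avoid that.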

7. Create the Spring AOP aspect class

package com.bfecerp.common.datasource;

import com.bfecerp.common.annotation.ReadDataSource;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import java.lang.reflect.Method;

/**
 * @Description
 * @Author zk
 * @createTime 2018/6/19 13:57
 */
@Aspect
@Component
public class DataSourceAop {
    private static Logger log = LoggerFactory.getLogger(DataSourceAop.class);

    @Pointcut("@annotation(com.bfecerp.common.annotation.WriteDataSource)")
    public void writeMethod(){}

    @Pointcut("@annotation(com.bfecerp.common.annotation.ReadDataSource)")
    public void readMethod(){}

    @Before("writeMethod()")
    public void beforeWrite(JoinPoint point) {
        DataSourceContextHolder.write();
        String className = point.getTarget().getClass().getName();
        String methodName = point.getSignature().getName();
        System.out.println("开始执行:"+className+"."+methodName+"()方法...");
        log.info("dataSource切换到:write");
    }
    @Before("readMethod()")
    public void beforeRead(JoinPoint point) throws ClassNotFoundException {
        //设置数据库为读数据
        DataSourceContextHolder.read();
        /*spring AOP测试代码*/
        String currentClassName = point.getTarget().getClass().getName();//根据切点获取当前调用的类名
        String methodName = point.getSignature().getName();//根据切点获取当前调用的类方法
        Object[] args = point.getArgs();//根据切点获取当前类方法的参数
        System.out.println("开始执行:"+currentClassName+"."+methodName+"()方法...");
        Class reflexClassName = Class.forName(currentClassName);//根据反射获取当前调用类的实例
        Method[] methods = reflexClassName.getMethods();//获取该实例的所有方法
        for(Method method : methods){
            if(method.getName().equals(methodName)){
                String desrciption = method.getAnnotation(ReadDataSource.class).description();//获取该实例方法上注解里面的描述信息
                System.out.println("desrciption:" + desrciption);
            }
        }
        log.info("dataSource切换到:Read");
    }
}

Spring AOP intercepts the annotated methods and decides which data source to use before the method body runs.
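The aspect only sets the routing key before the method runs; nothing resets it afterwards. An optional cleanup advice, not shown in the original post, could be added to the same DataSourceAop class (requires org.aspectj.lang.annotation.After):

    // Optional addition (not in the original post): clear the ThreadLocal after the
    // advised method completes, so pooled worker threads do not keep a stale key.
    @After("writeMethod() || readMethod()")
    public void clearDataSourceKey() {
        DataSourceContextHolder.getLocal().remove();
        log.info("dataSource routing key cleared");
    }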

@Pointcut("@annotation(com.aop.writeAndRead.config.WriteDataSource)"

这里是对自定义注解作切点,双引号里面也可以换成对方法,但是个人觉得如果对方法作切点的话,如果方法多了这里写的就很长了
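For illustration only, an execution-based pointcut might look like the sketch below; the package and method-name patterns are assumptions, not taken from the original project:

    // Hypothetical execution-based alternative: route all select*/get*/list* methods of the
    // service layer to the read data source without annotating each method individually.
    @Pointcut("execution(* com.bfecerp.modules..service..*.select*(..))"
            + " || execution(* com.bfecerp.modules..service..*.get*(..))"
            + " || execution(* com.bfecerp.modules..service..*.list*(..))")
    public void readMethodByName(){}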

8. Create the annotation types

package com.bfecerp.common.annotation;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * @Description
 * @Author zk
 * @createTime 2018/6/19 13:58
 */
@Target({ElementType.METHOD, ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface ReadDataSource {
    String description() default "";
}
package com.bfecerp.common.annotation;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * @Description
 * @Author zk
 * @createTime 2018/6/19 13:58
 */
@Target({ElementType.METHOD, ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface WriteDataSource {
    String description() default "";
}

With one of these annotations on a method that needs a particular data source, Spring AOP can intercept the call, select the data source first and then execute the method.

Test:

Add the annotations to the methods, as shown below:

@RestController
@RequestMapping("/test")
public class TestController {
    @Autowired
    private UserInfoService userInfoServiceImpl;

    @RequestMapping("/other")
    public void getByOther(){
        List<UserInfo> userInfos = userInfoServiceImpl.listAll();
        System.out.println("从other库查到的数据=="+userInfos);
    }

    @RequestMapping("/master")
    public void getByMaster(){
        List<UserInfo> userInfos = userInfoServiceImpl.getByMaster();
        System.out.println("从master库查到的数据=="+userInfos);
    }

}
@Service("userInfoService")
public class UserInfoServiceImpl implements UserInfoService {
    private static final Logger LOG = Logger.getLogger(UserInfoServiceImpl.class);
    @Autowired
    private UserInfoMapper userInfoMapper;

    @WriteDataSource(description="WRITE")
    public void writeUser(UserInfo userInfo){
        userInfoMapper.writeUser(userInfo);
    }

    @ReadDataSource(description="READ")
    @Transactional(propagation= Propagation.REQUIRED,isolation= Isolation.DEFAULT,readOnly=true)
    public Map<String, String> readUser(){
        return userInfoMapper.readUser();
    }

    @Override
    //@ReadDataSource(description="READ")
    //@WriteDataSource(description="WRITE")
    public List<UserInfo> listAll() {
        return userInfoMapper.listAll();
    }

    //@ReadDataSource(description="READ")
    //@WriteDataSource(description="WRITE")
    //@Transactional(propagation= Propagation.REQUIRED,isolation= Isolation.DEFAULT,readOnly=true)
    @Override
    public List<UserInfo> getByMaster() {
        return userInfoMapper.getByMaster();
    }

    @Override
    public int insert(UserInfo userInfo) {
        return 0;
    }
}
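One point left implicit above: readUser() is annotated with @Transactional, and when transactions are combined with this routing setup the transaction manager should manage the same routing data source that MyBatis uses, so that the transactional connection and the routed connection are one and the same. A minimal sketch, not from the original post, that could be added to MybatisConfiguration (requires org.springframework.jdbc.datasource.DataSourceTransactionManager and org.springframework.transaction.PlatformTransactionManager):

    // Sketch only (not in the original post): bind the transaction manager to the routing
    // data source so @Transactional and MyBatis share the same, correctly routed connection.
    @Bean
    public PlatformTransactionManager transactionManager(AbstractRoutingDataSource routingDataSource) {
        return new DataSourceTransactionManager(routingDataSource);
    }

Note that the routing key must already be set when the transaction obtains its connection, so the data-source-switching aspect has to run before the transaction advice (for example by giving DataSourceAop a higher precedence via @Order).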

Reposted from blog.csdn.net/APM800/article/details/80750737