SpringCloud + MyBatis + Druid + ShardingSphere 整合之分库分表

对ShardingSphere做了一层包装,让它在支持分库分表的同时,也可以支持没有分库分表的普通数据表。由于我这边公司用的是阿里云MySQL自带的读写分离,所以关于读写分离的部分我没有写。

版本:

<!-- SpringCloud -->
<dependency>
    <groupId>org.springframework.cloud</groupId>
    <artifactId>spring-cloud-dependencies</artifactId>
    <version>Dalston.SR1</version>
    <type>pom</type>
    <scope>import</scope>
</dependency>
<!-- SpringBoot -->
<parent>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-parent</artifactId>
    <version>1.5.4.RELEASE</version> 
</parent>
<!-- Mybatis -->
<dependency>
    <groupId>org.mybatis</groupId>
    <artifactId>mybatis</artifactId>
    <version>3.3.1</version>
</dependency>
<dependency>
    <groupId>org.mybatis</groupId>
    <artifactId>mybatis-spring</artifactId>
    <version>1.2.4</version>
</dependency>
<dependency>
    <groupId>tk.mybatis</groupId>
    <artifactId>mapper</artifactId>
    <version>3.4.0</version>
</dependency>
<!-- Druid -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>druid</artifactId>
    <version>1.0.11</version>
</dependency>
<!-- ShardingSphere -->
<dependency>
    <groupId>io.shardingsphere</groupId>
    <artifactId>sharding-jdbc-core</artifactId>
    <version>3.0.0</version>
</dependency>

配置文件:

shardingSphere:
  dbs:
  - name: db1 # 库名,跟rules里的db${1..3}进行关联
    url: 
    username: 
    password: 
  - name: db2
    url: 
    username: 
    password: 
  - name: db3
    url: 
    username: 
    password:
  rules: # 用于分库分表的table
  - name: table
    # 实际数据库表节点
    # db${1..3} 表示 db1 ~ db3
    # table_$->{0..199} 表示 table_0 ~ table_199
    actualDataNodes: db${1..3}.table_$->{0..199}
    # 数据库分片属性, 如果有分库且需要根据某个属性计算库名,则需要设置
    dbShardingColumn: id
    # 数据库库名计算规则,如果需要根据某个属性进行计算,则需要设置dbShardingColumn
    dbAlgorithmExpression: db${id % 3}
    # 表分片属性,如果有分表且需要根据某个属性计算表名,则需要设置
    tableShardingColumn: id
    # 表名计算规则,如果需要根据某个属性进行计算,则需要设置tableShardingColumn
    tableAlgorithmExpression: table_${id % 200}
  tbs: # 用于没有分库分表的table
  - name: db1 # 库名,表示db1里的tables相关表是没有分库分表的数据表
    tables: tableA, tableB, tableC # tableA,tableB,tableC表示这3张表没有分库分表
props:
  sql.show: true # 是否显示SQL(注意:示例代码中 createDataSource 传入的是空 Properties,此项需要自行接入才会生效)

  

配置类:

import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Configuration;

import java.util.List;
import java.util.Map;

/**
 * Binds the {@code shardingSphere.*} section of the application configuration.
 *
 * <p>Each list entry is a loose key/value map (rather than a typed POJO) so the
 * same class can carry the different key sets of the three sections shown in
 * the sample YAML above.
 *
 * <p>NOTE(review): the {@code props} section (e.g. {@code sql.show}) from the
 * sample YAML has no matching field here, so it is never bound — confirm
 * whether it should be added.
 */
@Data
@Configuration
@EnableConfigurationProperties
@ConfigurationProperties(prefix = "shardingSphere")
public class ShardingSphereConfig {
    // Physical databases: each entry carries name / url / username / password.
    private List<Map<String, String>> dbs;
    // Sharded logical tables: name, actualDataNodes, and the optional
    // db/table sharding column + inline algorithm expression keys.
    private List<Map<String, String>> rules;
    // Non-sharded tables: each entry carries the database "name" and a
    // comma-separated "tables" list living in that database.
    private List<Map<String, String>> tbs;
}

MybatisBean:

import com.alibaba.druid.pool.DruidDataSource;
import com.github.pagehelper.PageHelper;
import io.shardingsphere.api.config.ShardingRuleConfiguration;
import io.shardingsphere.api.config.TableRuleConfiguration;
import io.shardingsphere.api.config.strategy.InlineShardingStrategyConfiguration;
import io.shardingsphere.shardingjdbc.api.ShardingDataSourceFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.ibatis.plugin.Interceptor;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.bind.RelaxedPropertyResolver;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;

/**
 * mybatis 配置数据源类
 */
@Configuration
@EnableTransactionManagement
public class MybatisConfiguration implements EnvironmentAware {
    @Autowired
    private ShardingSphereConfig shardingSphereConfig;
    private RelaxedPropertyResolver propertyResolver;
    private String driveClassName;
    /* mybatis 参数 */
    private String xmlLocation;
    private String typeAliasesPackage;
    /*  druid参数 */
    private String filters;
    private String maxActive;
    private String initialSize;
    private String maxWait;
    private String minIdle;
    private String timeBetweenEvictionRunsMillis;
    private String minEvictableIdleTimeMillis;
    private String validationQuery;
    private String testWhileIdle;
    private String testOnBorrow;
    private String testOnReturn;
    private String poolPreparedStatements;
    private String maxOpenPreparedStatements;

    @Override
    public void setEnvironment(Environment environment) {
        this.propertyResolver = new RelaxedPropertyResolver(environment, null);
        // 封装druid参数
        this.driveClassName = propertyResolver.getProperty("spring.datasource.driver-class-name");
        this.filters = propertyResolver.getProperty("spring.datasource.filters");
        this.maxActive = propertyResolver.getProperty("spring.datasource.maxActive");
        this.initialSize = propertyResolver.getProperty("spring.datasource.initialSize");
        this.maxWait = propertyResolver.getProperty("spring.datasource.maxWait");
        this.minIdle = propertyResolver.getProperty("spring.datasource.minIdle");
        this.timeBetweenEvictionRunsMillis = propertyResolver.getProperty("spring.datasource.timeBetweenEvictionRunsMillis");
        this.minEvictableIdleTimeMillis = propertyResolver.getProperty("spring.datasource.minEvictableIdleTimeMillis");
        this.validationQuery = propertyResolver.getProperty("spring.datasource.validationQuery");
        this.testWhileIdle = propertyResolver.getProperty("spring.datasource.testWhileIdle");
        this.testOnBorrow = propertyResolver.getProperty("spring.datasource.testOnBorrow");
        this.testOnReturn = propertyResolver.getProperty("spring.datasource.testOnReturn");
        this.poolPreparedStatements = propertyResolver.getProperty("spring.datasource.poolPreparedStatements");
        this.maxOpenPreparedStatements = propertyResolver.getProperty("spring.datasource.maxOpenPreparedStatements");
        // 封装mybatis参数
        this.typeAliasesPackage = propertyResolver.getProperty("mybatis.typeAliasesPackage");
        this.xmlLocation = propertyResolver.getProperty("mybatis.xmlLocation");
    }

    @Bean
    public DataSource shardingDataSource() {
        // 封装数据库连接信息
        Map<String, DataSource> dataSourceMap = new HashMap<>();
        shardingSphereConfig.getDbs().forEach(config -> {
            DruidDataSource dataSource = druidDataSource(config.get("url"), config.get("username"), config.get("password"));
            dataSourceMap.put(config.get("name"), dataSource);
        });
        
        ShardingRuleConfiguration shardingConfig = new ShardingRuleConfiguration();
        // 封装有做分库分表的表结构信息
        shardingSphereConfig.getRules().forEach(config -> {
            TableRuleConfiguration rule = new TableRuleConfiguration();
            rule.setLogicTable(config.get("name"));
            rule.setActualDataNodes(config.get("actualDataNodes"));
            rule.setTableShardingStrategyConfig(new InlineShardingStrategyConfiguration(config.get("tableShardingColumn"),
                    config.get("tableAlgorithmExpression")));
            rule.setDatabaseShardingStrategyConfig(new 
InlineShardingStrategyConfiguration(config.get("dbShardingColumn"),
                    config.get("dbAlgorithmExpression"));    
            shardingConfig.getTableRuleConfigs().add(rule);
        });
        // 封装没有做分库分表的表结构信息
        shardingSphereConfig.getTbs().forEach(config -> {
            String[] tableNames = StringUtils.deleteWhitespace(config.get("tables")).split(",");
            for (String name : tableNames) {
                if (StringUtils.isBlank(name)) {
                    continue;
                }
                String dbName = config.get("name");
                TableRuleConfiguration rule = new TableRuleConfiguration();
                rule.setLogicTable(name);
                StringBuilder builder = new StringBuilder();
                builder.append(dbName).append(".").append(name);
                rule.setActualDataNodes(builder.toString());
                shardingConfig.getTableRuleConfigs().add(rule);
            }
        });
        DataSource dataSource = null;
        try {
            dataSource = ShardingDataSourceFactory.createDataSource(dataSourceMap, shardingConfig,
                    new ConcurrentHashMap(), new Properties());
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return dataSource;
    }

    private DruidDataSource druidDataSource(String url, String userName, String password) {
        DruidDataSource druidDataSource = new DruidDataSource();
        druidDataSource.setUrl(url);
        druidDataSource.setUsername(userName);
        druidDataSource.setPassword(password);
        druidDataSource.setDriverClassName(
                StringUtils.isNotBlank(driveClassName) ? driveClassName : "com.mysql.jdbc.Driver");
        druidDataSource.setMaxActive(StringUtils.isNotBlank(maxActive) ? Integer.parseInt(maxActive) : 10);
        druidDataSource.setInitialSize(StringUtils.isNotBlank(initialSize) ? Integer.parseInt(initialSize) : 1);
        druidDataSource.setMaxWait(StringUtils.isNotBlank(maxWait) ? Integer.parseInt(maxWait) : 60000);
        druidDataSource.setMinIdle(StringUtils.isNotBlank(minIdle) ? Integer.parseInt(minIdle) : 3);
        druidDataSource.setTimeBetweenEvictionRunsMillis(StringUtils.isNotBlank(timeBetweenEvictionRunsMillis)
                ? Integer.parseInt(timeBetweenEvictionRunsMillis)
                : 60000);
        druidDataSource.setMinEvictableIdleTimeMillis(StringUtils.isNotBlank(minEvictableIdleTimeMillis)
                ? Integer.parseInt(minEvictableIdleTimeMillis)
                : 300000);
        druidDataSource.setValidationQuery(StringUtils.isNotBlank(validationQuery) ? validationQuery : "select 'x'");
        druidDataSource.setTestWhileIdle(!StringUtils.isNotBlank(testWhileIdle) || Boolean.parseBoolean(testWhileIdle));
        druidDataSource.setTestOnBorrow(StringUtils.isNotBlank(testOnBorrow) && Boolean.parseBoolean(testOnBorrow));
        druidDataSource.setTestOnReturn(StringUtils.isNotBlank(testOnReturn) && Boolean.parseBoolean(testOnReturn));
        druidDataSource.setPoolPreparedStatements(
                !StringUtils.isNotBlank(poolPreparedStatements) || Boolean.parseBoolean(poolPreparedStatements));
        druidDataSource.setMaxOpenPreparedStatements(StringUtils.isNotBlank(maxOpenPreparedStatements)
                ? Integer.parseInt(maxOpenPreparedStatements)
                : 20);
        try {
            druidDataSource.setFilters(StringUtils.isNotBlank(filters) ? filters : "stat, wall");
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return druidDataSource;
    }

    @Bean(name = "sqlSessionFactory")
    public SqlSessionFactory sqlSessionFactoryBean(DataSource dataSource) {
        SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
        bean.setDataSource(dataSource);
        if (StringUtils.isNotBlank(typeAliasesPackage)) {
            bean.setTypeAliasesPackage(typeAliasesPackage);
        }
        // 分页插件
        PageHelper pageHelper = new PageHelper();
        Properties properties = new Properties();
        properties.setProperty("reasonable", "false");
        properties.setProperty("supportMethodsArguments", "true");
        properties.setProperty("returnPageInfo", "check");
        properties.setProperty("params", "count=countSql");
        pageHelper.setProperties(properties);
        // 添加XML目录
        ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        Interceptor[] plugins = new Interceptor[]{pageHelper};
        bean.setPlugins(plugins);
        try {
            bean.setMapperLocations(resolver.getResources(xmlLocation));
            //取消二级缓存
            bean.getObject().getConfiguration().setCacheEnabled(false);
            return bean.getObject();
        } catch (Exception e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }
    }

    @Bean
    public SqlSessionTemplate sqlSessionTemplate(SqlSessionFactory sqlSessionFactory) {
        return new SqlSessionTemplate(sqlSessionFactory);
    }

    @Bean
    public DataSourceTransactionManager transactionManager(DataSource dataSource) {
        return new DataSourceTransactionManager(dataSource);
    }

    @Bean
    public PageHelper getPageHelper() {
        PageHelper pageHelper = new PageHelper();
        Properties properties = new Properties();
        properties.setProperty("helperDialect", "mysql");
        properties.setProperty("reasonable", "true");
        properties.setProperty("supportMethodsArguments", "true");
        properties.setProperty("params", "count=countSql");
        pageHelper.setProperties(properties);
        return pageHelper;
    }

    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
}

结束!

如果帮到你,请点个赞吧 O(∩_∩)O~

猜你喜欢

转载自blog.csdn.net/qq171563857/article/details/88694146