The databases are MySQL 5.7 (prepare two of them); make sure the mysql-connector-java version matches the database version.
Add the Atomikos dependency to the Maven pom:
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-jta-atomikos</artifactId>
</dependency>
Add the remaining dependencies according to your own architecture.
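Because the driver version has to line up with the server (MySQL 5.7 here), the MySQL driver dependency might look like the following; the version shown is only an example of a 5.7-compatible 5.1.x driver, not a requirement:
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>5.1.47</version>
</dependency>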
Data source annotation: service methods that target a specific data source are annotated with @DataSource, and an AOP aspect switches the data source accordingly.
package com.luck.datasources.annotation;
import java.lang.annotation.*;
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface DataSource {
String name() default "";
}
Dynamic data source
package com.luck.datasources;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
public class DynamicDataSource extends AbstractRoutingDataSource {
// holds the data source key bound to the current thread
private static final ThreadLocal<String> CONTEXTHOLDER = new ThreadLocal<>();
@Override
protected Object determineCurrentLookupKey() {
return getDataSource();
}
public static void setDataSource(String dataSource) {
CONTEXTHOLDER.set(dataSource);
}
public static String getDataSource() {
return CONTEXTHOLDER.get();
}
public static void clearDataSource() {
CONTEXTHOLDER.remove();
}
}
Data source AOP aspect
package com.luck.datasources.aspect;
import java.lang.reflect.Method;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.core.Ordered;
import org.springframework.stereotype.Component;
import com.luck.datasources.DynamicDataSource;
import com.luck.datasources.annotation.DataSource;
@Aspect
@Component
public class DataSourceAspect implements Ordered {
@Pointcut("@annotation(com.luck.datasources.annotation.DataSource)")
public void dataSourcePointCut() {
}
@Around("dataSourcePointCut()")
public Object around(ProceedingJoinPoint point) throws Throwable {
MethodSignature signature = (MethodSignature) point.getSignature();
Method method = signature.getMethod();
DataSource ds = method.getAnnotation(DataSource.class);
if (ds == null) {
// no @DataSource annotation: fall back to the default "first" data source
DynamicDataSource.setDataSource("first");
} else {
DynamicDataSource.setDataSource(ds.name());
}
try {
return point.proceed();
} finally {
// always clear the ThreadLocal so the key does not leak into the next call on this thread
DynamicDataSource.clearDataSource();
}
}
@Override
public int getOrder() {
return 1;
}
}
Configure the two database data sources and give each one its own SqlSessionFactory. That way business code for different data sources uses its own session and stays isolated, and switching data sources no longer causes "table not found" errors.
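The properties read by the @Value annotations in the config class below are not listed in the original; an application.yml sketch with the same keys might look like this (URLs, credentials and pool values are placeholders to adjust):
spring:
  datasource:
    druid:
      driver-class-name: com.mysql.jdbc.Driver
      initial-size: 5
      min-idle: 5
      max-active: 20
      max-wait: 60000
      time-between-eviction-runs-millis: 60000
      min-evictable-idle-time-millis: 300000
      validation-query: SELECT 1
      test-while-idle: true
      test-on-borrow: false
      remove-abandoned: true
      remove-abandoned-timeout: 180
      filter: stat
      first:
        url: jdbc:mysql://localhost:3306/db_first?useUnicode=true&characterEncoding=utf8
        username: root
        password: root
      second:
        url: jdbc:mysql://localhost:3306/db_second?useUnicode=true&characterEncoding=utf8
        username: root
        password: root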
package com.luck.config;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.apache.ibatis.logging.stdout.StdOutImpl;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.jta.atomikos.AtomikosDataSourceBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import com.alibaba.druid.pool.xa.DruidXADataSource;
import com.luck.datasources.DynamicDataSource;
@Configuration
public class DataSourceConfig {
@Value("${spring.datasource.druid.driver-class-name}")
private String driverClass;
@Value("${spring.datasource.druid.first.url}")
private String jdbcUrl;
@Value("${spring.datasource.druid.first.username}")
private String username;
@Value("${spring.datasource.druid.first.password}")
private String password;
@Value("${spring.datasource.druid.initial-size}")
private int initialSize;
@Value("${spring.datasource.druid.min-idle}")
private int minIdle;
@Value("${spring.datasource.druid.max-active}")
private int maxActive;
@Value("${spring.datasource.druid.max-wait}")
private long maxWaitMillis;
@Value("${spring.datasource.druid.time-between-eviction-runs-millis}")
private long timeBetweenEvictionRunsMillis;
@Value("${spring.datasource.druid.min-evictable-idle-time-millis}")
private long minEvictableIdleTimeMillis;
@Value("${spring.datasource.druid.validation-query}")
private String validationQuery;
@Value("${spring.datasource.druid.test-while-idle}")
private boolean testWhileIdle;
@Value("${spring.datasource.druid.test-on-borrow}")
private boolean testOnBorrow;
@Value("${spring.datasource.druid.remove-abandoned}")
private boolean removeAbandoned;
@Value("${spring.datasource.druid.remove-abandoned-timeout}")
private int removeAbandonedTimeout;
@Value("${spring.datasource.druid.filter}")
private String filters;
@Value("${spring.datasource.druid.second.url}")
private String secondurl;
@Value("${spring.datasource.druid.second.username}")
private String secondusername;
@Value("${spring.datasource.druid.second.password}")
private String secondpassword;
/**
* Data source 1 configuration
*/
@Primary
@Bean(name = "first")
public DataSource first() throws SQLException {
DruidXADataSource first = new DruidXADataSource();
first.setDriverClassName(driverClass);
first.setUrl(jdbcUrl);
first.setUsername(username);
first.setPassword(password);
first.setInitialSize(initialSize);
first.setMinIdle(minIdle);
first.setMaxActive(maxActive);
first.setMaxWait(maxWaitMillis);
first.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
first.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
first.setValidationQuery(validationQuery);
first.setTestWhileIdle(testWhileIdle);
first.setTestOnBorrow(testOnBorrow);
first.setRemoveAbandoned(removeAbandoned);
first.setRemoveAbandonedTimeout(removeAbandonedTimeout);
first.setFilters(filters);
AtomikosDataSourceBean xaDataSource = new AtomikosDataSourceBean();
xaDataSource.setXaDataSource(first);
xaDataSource.setUniqueResourceName("first");
return xaDataSource;
}
/**
* Data source 2 configuration
*/
@Bean(name = "second")
public DataSource second() throws SQLException {
DruidXADataSource second = new DruidXADataSource();
second.setDriverClassName(driverClass);
second.setUrl(secondurl);
second.setUsername(secondusername);
second.setPassword(secondpassword);
second.setInitialSize(initialSize);
second.setMinIdle(minIdle);
second.setMaxActive(maxActive);
second.setMaxWait(maxWaitMillis);
second.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
second.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
second.setValidationQuery(validationQuery);
second.setTestWhileIdle(testWhileIdle);
second.setTestOnBorrow(testOnBorrow);
second.setRemoveAbandoned(removeAbandoned);
second.setRemoveAbandonedTimeout(removeAbandonedTimeout);
second.setFilters(filters);
AtomikosDataSourceBean xaDataSource = new AtomikosDataSourceBean();
xaDataSource.setXaDataSource(second);
xaDataSource.setUniqueResourceName("second");
return xaDataSource;
}
/**
* Dynamic routing data source
* @param first data source 1
* @param second data source 2
* @return the dynamic data source
*/
@Bean(name = "dataSource")
public DynamicDataSource dataSource(@Qualifier("first") DataSource first, @Qualifier("second") DataSource second) {
Map<Object, Object> targetDataSources = new HashMap<>();
targetDataSources.put("first", first);
targetDataSources.put("second", second);
DynamicDataSource dynamicDataSource = new DynamicDataSource();
dynamicDataSource.setTargetDataSources(targetDataSources);
// default to the first data source when no key has been set on the current thread
dynamicDataSource.setDefaultTargetDataSource(first);
return dynamicDataSource;
}
}
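The prose above calls for one SqlSessionFactory per data source, and DataSourceConfig already imports SqlSessionFactoryBean, StdOutImpl and PathMatchingResourcePatternResolver, but the factory bean methods themselves are missing from the snippet. A minimal sketch of the two bean methods to add inside DataSourceConfig follows (they reuse its existing imports). The mapper XML locations are assumptions, the direct wiring of each factory to its own XA data source follows the prose, and the bean names must match the MapperScannerConfigurer further down:
    @Bean(name = "sqlSessionFactory")
    public SqlSessionFactory sqlSessionFactory(@Qualifier("first") DataSource first) throws Exception {
        SqlSessionFactoryBean factoryBean = new SqlSessionFactoryBean();
        factoryBean.setDataSource(first);
        // mapper XML location is an assumption; adjust it to your project layout
        factoryBean.setMapperLocations(new PathMatchingResourcePatternResolver()
                .getResources("classpath*:mapper/business1/**/*.xml"));
        SqlSessionFactory sqlSessionFactory = factoryBean.getObject();
        // optional: print SQL to stdout
        sqlSessionFactory.getConfiguration().setLogImpl(StdOutImpl.class);
        return sqlSessionFactory;
    }
    @Bean(name = "secondSqlSessionFactory")
    public SqlSessionFactory secondSqlSessionFactory(@Qualifier("second") DataSource second) throws Exception {
        SqlSessionFactoryBean factoryBean = new SqlSessionFactoryBean();
        factoryBean.setDataSource(second);
        factoryBean.setMapperLocations(new PathMatchingResourcePatternResolver()
                .getResources("classpath*:mapper/business2/**/*.xml"));
        SqlSessionFactory sqlSessionFactory = factoryBean.getObject();
        sqlSessionFactory.getConfiguration().setLogImpl(StdOutImpl.class);
        return sqlSessionFactory;
    }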
The project uses tk.mybatis; the configuration for other MyBatis variants is much the same. The main job here is to give the business code of each data source its own mapper scan, so business code for different data sources must be separated into different packages. Otherwise, once transactions are involved, business code sharing the same package ends up bound to the same session and you get "table not found" errors.
package com.luck.config;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import tk.mybatis.mapper.autoconfigure.MapperAutoConfiguration;
import tk.mybatis.spring.mapper.MapperScannerConfigurer;
@Configuration
@AutoConfigureAfter(MapperAutoConfiguration.class)
public class MapperScannerConfig {
@Bean
public MapperScannerConfigurer mapperScannerConfigurer() {
MapperScannerConfigurer mapperScannerConfigurer = new MapperScannerConfigurer();
mapperScannerConfigurer.setSqlSessionFactoryBeanName("sqlSessionFactory");
mapperScannerConfigurer.setBasePackage("com.luck.business1**.mapper");
mapperScannerConfigurer.setMarkerInterface(com.luck.base.mapper.BaseMapper.class);
return mapperScannerConfigurer;
}
@Bean
public MapperScannerConfigurer secondMapperScannerConfigurer() {
MapperScannerConfigurer mapperScannerConfigurer = new MapperScannerConfigurer();
mapperScannerConfigurer.setSqlSessionFactoryBeanName("secondSqlSessionFactory");
mapperScannerConfigurer.setBasePackage("com.luck.business2.**.mapper");
mapperScannerConfigurer.setMarkerInterface(com.luck.base.mapper.BaseMapper.class);
return mapperScannerConfigurer;
}
}
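The marker interface com.luck.base.mapper.BaseMapper referenced by setMarkerInterface is not shown; a minimal sketch, assuming a plain tk.mybatis base mapper (keep it outside the scanned mapper packages so it is not registered as a mapper bean itself):
package com.luck.base.mapper;

import tk.mybatis.mapper.common.Mapper;

// common base mapper for tk.mybatis; concrete mappers extend it with their entity type
public interface BaseMapper<T> extends Mapper<T> {
}
Next, configure the Atomikos JTA transaction manager so that work on both data sources commits or rolls back as a single distributed transaction.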
package com.luck.config;
import javax.transaction.TransactionManager;
import javax.transaction.UserTransaction;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.jta.JtaTransactionManager;
import com.atomikos.icatch.jta.UserTransactionImp;
import com.atomikos.icatch.jta.UserTransactionManager;
@Configuration
@EnableTransactionManagement
public class XATransactionManagerConfig {
@Bean(name = "userTransaction")
public UserTransaction userTransaction() throws Throwable {
UserTransactionImp userTransactionImp = new UserTransactionImp();
userTransactionImp.setTransactionTimeout(10000);
return userTransactionImp;
}
@Bean(name = "atomikosTransactionManager", initMethod = "init", destroyMethod = "close")
public TransactionManager atomikosTransactionManager() throws Throwable {
UserTransactionManager userTransactionManager = new UserTransactionManager();
userTransactionManager.setForceShutdown(false);
return userTransactionManager;
}
@Bean(name = "transactionManager")
@DependsOn({ "userTransaction", "atomikosTransactionManager" })
public PlatformTransactionManager transactionManager() throws Throwable {
return new JtaTransactionManager(userTransaction(), atomikosTransactionManager());
}
}
Exclude Spring Boot's default data source auto-configuration.
package com.luck;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
@SpringBootApplication(exclude = DataSourceAutoConfiguration.class)
public class MainApplication {
public static void main(String[] args) {
SpringApplication.run(MainApplication.class, args);
}
}
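Finally, service-layer usage: methods that operate on a specific database carry @DataSource, and a @Transactional method composes them. Note that self-invocation bypasses Spring AOP proxies, so test1 and test2 would typically live in other service beans that the @Transactional method calls through injected references.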
@DataSource(name = "first")
@Override
public void test1() {
// mapper1.update()
}
@DataSource(name = "second")
@Override
public void test2() {
// mapper2.insert()
}
@Transactional(rollbackFor = Exception.class)
@Override
public void test3() {
test2();
test1();
int i = 1 / 0; // deliberately trigger an exception
}
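If test3 runs to completion, the update against the first database and the insert against the second commit together; when the exception is thrown, Atomikos rolls both back, which is the distributed-transaction behaviour this whole setup exists to provide.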