spring+mybatis 实现多数据源切换

背景

相信大家在开发工作中肯定遇到过一个应用可能需要连接两个或以上的数据源进行数据操作,又或者数据库单表数据量过大,导致查询很慢时,一般都会考虑分库分表,这时候也会涉及到多库数据源操作。下面讲解利用spring+mybatis 实现多数据源切换,话不多说直接上代码。

jdbc和log4j的配置

log4j.properties

#定义输出格式
ConversionPattern=%d %-5p [%t] %c - %m%n

log4j.rootLogger=DEBUG,Console
log4j.logger.com.cnblogs.lzrabbit=DEBUG
log4j.logger.org.springframework=ERROR
log4j.logger.org.mybatis=ERROR
log4j.logger.org.apache.ibatis=ERROR
log4j.logger.org.quartz=ERROR
log4j.logger.org.apache.axis2=ERROR
log4j.logger.org.apache.axiom=ERROR
log4j.logger.org.apache=ERROR
log4j.logger.httpclient=ERROR
#log4j.additivity.org.springframework=false
#Console 
log4j.appender.Console=org.apache.log4j.ConsoleAppender
log4j.appender.Console.Threshold=DEBUG 
log4j.appender.Console.Target=System.out 
log4j.appender.Console.layout=org.apache.log4j.PatternLayout 
log4j.appender.Console.layout.ConversionPattern=${ConversionPattern}
#log4j.appender.Console.encoding=UTF-8

#org.apache.log4j.DailyRollingFileAppender
log4j.appender.DailyFile=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DailyFile.DatePattern='.'yyyy-MM-dd'.log'
log4j.appender.DailyFile.File=${myApp.root}/logs/daily.log
log4j.appender.DailyFile.Append=true
log4j.appender.DailyFile.Threshold=DEBUG
log4j.appender.DailyFile.layout=org.apache.log4j.PatternLayout
log4j.appender.DailyFile.layout.ConversionPattern=${ConversionPattern}
log4j.appender.DailyFile.encoding=UTF-8

# %c 输出日志信息所属的类的全名 
# %d 输出日志时间点的日期或时间,默认格式为ISO8601,也可以在其后指定格式,比如:%d{yyyy-MM-dd HH:mm:ss},输出类似:2002-10-18 22:10:28
# %f 输出日志信息所属的类的类名 
# %l 输出日志事件的发生位置,即输出日志信息的语句处于它所在的类的第几行 
# %m 输出代码中指定的信息,如log(message)中的message 
# %n 输出一个回车换行符,Windows平台为“\r\n”,Unix平台为“\n” 
# %p 输出优先级,即DEBUG,INFO,WARN,ERROR,FATAL。如果是调用debug()输出的,则为DEBUG,依此类推 
# %r 输出自应用启动到输出该日志信息所耗费的毫秒数 
# %t 输出产生该日志事件的线程名

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43

jdbc.properties

#============================================================================
# MySQL
#============================================================================
jdbc.mysql.driver=com.mysql.jdbc.Driver
jdbc.mysql.url=jdbc:mysql://127.0.0.1:3306/test?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true
jdbc.mysql.username=root
jdbc.mysql.password=root

#============================================================================
# MS SQL Server
#============================================================================
#jdbc.sqlserver.driver=com.microsoft.sqlserver.jdbc.SQLServerDriver
#jdbc.sqlserver.url=jdbc:sqlserver://127.0.0.1:1433;database=test;
#jdbc.sqlserver.username=sa
#jdbc.sqlserver.password=sa

#============================================================================
# MS SQL Server (JTDS)
#============================================================================
jdbc.sqlserver.driver=net.sourceforge.jtds.jdbc.Driver
jdbc.sqlserver.url=jdbc:jtds:sqlserver://127.0.0.1:1433/test
jdbc.sqlserver.username=sa
jdbc.sqlserver.password=sa


#============================================================================
# 通用配置
#============================================================================
jdbc.initialSize=5
jdbc.minIdle=5
jdbc.maxIdle=20
jdbc.maxActive=100
jdbc.maxWait=100000
jdbc.defaultAutoCommit=false
jdbc.removeAbandoned=true
jdbc.removeAbandonedTimeout=600
jdbc.testWhileIdle=true
jdbc.timeBetweenEvictionRunsMillis=60000
jdbc.numTestsPerEvictionRun=20
jdbc.minEvictableIdleTimeMillis=300000

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42

多数据源时Spring配置文件

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 xmlns:context="http://www.springframework.org/schema/context"
 xmlns:aop="http://www.springframework.org/schema/aop"
 xsi:schemaLocation="http://www.springframework.org/schema/beans
 http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
 http://www.springframework.org/schema/context
 http://www.springframework.org/schema/context/spring-context-3.0.xsd
 http://www.springframework.org/schema/aop
 http://www.springframework.org/schema/aop/spring-aop-3.0.xsd">
 <!-- Resolves the ${jdbc.*} placeholders below from jdbc.properties on the classpath. -->
 <bean id="propertyConfigurer" class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
 <property name="location" value="classpath:jdbc.properties"/>
 </bean>
 <!-- SQL Server connection pool (Commons DBCP); all settings come from jdbc.properties. -->
 <bean id="sqlServerDataSource" class="org.apache.commons.dbcp.BasicDataSource" destroy-method="close">
 <property name="driverClassName" value="${jdbc.sqlserver.driver}"/>
 <property name="url" value="${jdbc.sqlserver.url}"/>
 <property name="username" value="${jdbc.sqlserver.username}"/>
 <property name="password" value="${jdbc.sqlserver.password}"/>
 <property name="initialSize" value="${jdbc.initialSize}"/>
 <property name="minIdle" value="${jdbc.minIdle}"/>
 <property name="maxIdle" value="${jdbc.maxIdle}"/>
 <property name="maxActive" value="${jdbc.maxActive}"/>
 <property name="maxWait" value="${jdbc.maxWait}"/>
 <property name="defaultAutoCommit" value="${jdbc.defaultAutoCommit}"/>
 <property name="removeAbandoned" value="${jdbc.removeAbandoned}"/>
 <property name="removeAbandonedTimeout" value="${jdbc.removeAbandonedTimeout}"/>
 <property name="testWhileIdle" value="${jdbc.testWhileIdle}"/>
 <property name="timeBetweenEvictionRunsMillis" value="${jdbc.timeBetweenEvictionRunsMillis}"/>
 <property name="numTestsPerEvictionRun" value="${jdbc.numTestsPerEvictionRun}"/>
 <property name="minEvictableIdleTimeMillis" value="${jdbc.minEvictableIdleTimeMillis}"/>
 </bean>
 <!-- MySQL connection pool (Commons DBCP); all settings come from jdbc.properties. -->
 <bean id="mySqlDataSource" class="org.apache.commons.dbcp.BasicDataSource" destroy-method="close">
 <property name="driverClassName" value="${jdbc.mysql.driver}"/>
 <property name="url" value="${jdbc.mysql.url}"/>
 <property name="username" value="${jdbc.mysql.username}"/>
 <property name="password" value="${jdbc.mysql.password}"/>
 <property name="initialSize" value="${jdbc.initialSize}"/>
 <property name="minIdle" value="${jdbc.minIdle}"/>
 <property name="maxIdle" value="${jdbc.maxIdle}"/>
 <property name="maxActive" value="${jdbc.maxActive}"/>
 <property name="maxWait" value="${jdbc.maxWait}"/>
 <property name="defaultAutoCommit" value="${jdbc.defaultAutoCommit}"/>
 <property name="removeAbandoned" value="${jdbc.removeAbandoned}"/>
 <property name="removeAbandonedTimeout" value="${jdbc.removeAbandonedTimeout}"/>
 <property name="testWhileIdle" value="${jdbc.testWhileIdle}"/>
 <property name="timeBetweenEvictionRunsMillis" value="${jdbc.timeBetweenEvictionRunsMillis}"/>
 <property name="numTestsPerEvictionRun" value="${jdbc.numTestsPerEvictionRun}"/>
 <property name="minEvictableIdleTimeMillis" value="${jdbc.minEvictableIdleTimeMillis}"/>
 </bean>
 <!-- Routing data source: picks one of the two pools by the key bound via
      MultipleDataSource.setDataSourceKey(...) ("mySqlDataSource" or
      "sqlServerDataSource"); when no key is bound it falls back to mySqlDataSource. -->
 <bean id="multipleDataSource" class="com.cnblogs.lzrabbit.MultipleDataSource">
 <property name="defaultTargetDataSource" ref="mySqlDataSource"/>
 <property name="targetDataSources">
 <map>
 <entry key="mySqlDataSource" value-ref="mySqlDataSource"/>
 <entry key="sqlServerDataSource" value-ref="sqlServerDataSource"/>
 </map>
 </property>
 </bean>
 <!-- Single MyBatis session factory built on top of the routing data source,
      so every mapper transparently uses whichever pool is currently selected. -->
 <bean id="sqlSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
 <property name="dataSource" ref="multipleDataSource"/>
 </bean>

 <!-- mybatis.spring自动映射 -->
 <bean class="org.mybatis.spring.mapper.MapperScannerConfigurer">
 <property name="basePackage" value="com.cnblogs.lzrabbit"/>
 </bean>

 <!-- 自动扫描,多个包以 逗号分隔 -->
 <context:component-scan base-package="com.cnblogs.lzrabbit"/>
 <aop:aspectj-autoproxy/>
</beans>

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73

Java代码编写

MultipleDataSource 多数据源配置类

package com.xxx.gfw.pubfound;

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

/***
 * Custom routing data source.
 * <p>Spring's {@code AbstractRoutingDataSource} asks
 * {@link #determineCurrentLookupKey()} for a key on every connection request
 * and uses it to pick one entry from the configured targetDataSources map
 * (falling back to defaultTargetDataSource when the key is {@code null}).
 *
 * @ClassName: MultipleDataSource
 * @author youqiang.xiong
 * @version 1.0
 * @since
 */
public class MultipleDataSource extends AbstractRoutingDataSource {

    // Key of the data source bound to the current thread. InheritableThreadLocal
    // so threads spawned by a request inherit the parent's selection.
    // NOTE(review): on pooled/reused threads a previously set key survives the
    // request — callers should invoke clearDataSourceKey() when done.
    private static final ThreadLocal<String> dataSourceKey = new InheritableThreadLocal<String>();

    /**
     * Binds the given lookup key to the current thread; subsequent connections
     * obtained through this data source route to the matching pool.
     *
     * @param dataSource a key of the targetDataSources map,
     *                   e.g. {@code "mySqlDataSource"}; {@code null} selects the default
     */
    public static void setDataSourceKey(String dataSource) {
        dataSourceKey.set(dataSource);
    }

    /**
     * Removes the key bound to the current thread so routing falls back to the
     * defaultTargetDataSource. Also prevents ThreadLocal leaks in thread pools.
     */
    public static void clearDataSourceKey() {
        dataSourceKey.remove();
    }

    /**
     * @return the key bound to the current thread, or {@code null} to use the
     *         default target data source
     */
    @Override
    protected Object determineCurrentLookupKey() {
        return dataSourceKey.get();
    }
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29

自定义注解DataSourceType

package com.xxx.pubfound.aop.anntion;

import java.lang.annotation.*;

/***
 * 
 * Project Name: gfw-base-common-service
 * <p>Marks which data source a class (service/dao) should use. The aspect
 * reads this annotation from the target class and binds {@link #value()} as
 * the routing key; the value must therefore match a key of the
 * targetDataSources map configured on the routing data source bean.
 *
 * @ClassName: DataSourceType
 * @date 2018-05-18 17:09:49
 *
 * @author youqiang.xiong
 * @version 1.0
 * @since
 */
@Target({ ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Documented 
public @interface DataSourceType {
 String value() default "dataSource";
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23

两个service层分别加上DataSourceType 注解

ProvinceServiceImpl.java

package com.xxx.pubfound.service;

import java.util.ArrayList;
import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.xxx.pubfound.aop.anntion.DataSourceType;
import com.xxx.pubfound.dao.SysProvinceDao;
import com.xxx.pubfound.dto.SysProvinceDTO;
import com.xxx.pubfound.struct.SysProvinceListStruct;
import com.xxx.pubfound.struct.SysProvinceStruct;
import com.xxx.rpc.api.AbstractRpcService;

/***
 * Province service implementation.
 * <p>Routed to the "gfwDataSource" data source via {@link DataSourceType}.
 *
 * @ClassName: ProvinceServiceImpl
 * @author youqiang.xiong
 * @version 1.0
 * @since
 */
@DataSourceType(value="gfwDataSource")
@Service
public class ProvinceServiceImpl extends AbstractRpcService implements ProvinceService {

    @Autowired
    private SysProvinceDao sysProvinceDao;

    /**
     * Loads every province from the DAO and converts the result into the
     * RPC struct form.
     *
     * @return the struct wrapper, or {@code null} when no provinces exist
     */
    @Override
    public SysProvinceListStruct getProvinceList() {
        return beanToStruct(sysProvinceDao.getProvinceList());
    }

    /**
     * Converts a list of province DTOs into the struct wrapper used on the
     * RPC boundary.
     *
     * @param provinceList province DTOs from the DAO; may be {@code null}
     * @return the wrapped struct list, or {@code null} when the input is
     *         {@code null} or empty (kept for caller compatibility)
     */
    private SysProvinceListStruct beanToStruct(List<SysProvinceDTO> provinceList) {
        if (provinceList == null || provinceList.isEmpty()) {
            return null;
        }

        List<SysProvinceStruct> structs = new ArrayList<SysProvinceStruct>();
        for (int i = 0; i < provinceList.size(); i++) {
            SysProvinceDTO source = provinceList.get(i);
            SysProvinceStruct target = new SysProvinceStruct();
            target.provinceId = source.getProvinceId();
            target.provinceName = source.getProvinceName();
            structs.add(target);
        }

        return new SysProvinceListStruct(structs);
    }

}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75

DefaultExceptionCollector.java

package com.xxx.pubfound.service;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.alibaba.druid.pool.DruidDataSource;
import com.github.pagehelper.PageInfo;
import com.xxx.pubfound.aop.anntion.DataSourceType;
import com.xxx.pubfound.dao.PfExceptionLogDao;
import com.xxx.pubfound.dto.NotifyLevelEnum;
import com.xxx.pubfound.entity.PfExceptionLog;
import com.xxx.pubfound.struct.PagedPfExceptionLogStruct;
import com.xxx.pubfound.struct.PfExceptionLogStruct;
import com.xxx.rpc.api.AbstractRpcService;

/***
 * 
* Project Name:gfw-public-foundation-impl
* <p>异常采集
*
* @ClassName: DefaultExceptionCollector
* @date 2018年5月28日 下午8:01:43
*
* @author youqiang.xiong
* @version 1.0
* @since
 */
@DataSourceType(value="dataSource")
@Service
public class DefaultExceptionCollector extends AbstractRpcService implements ExceptionCollector {

 @Autowired
 PfExceptionLogDao pfExceptionLogDao;

 @Autowired
 DruidDataSource dataSource;

 @Override
 public void collect(long reqTime, String exceptionClass, String stackTrace, int resultCode, String environment,
 String nodeNameEn) {

 //FIXME 采集异常!!
 //TODO luoshan
 System.out.println(dataSource.getConnectProperties());
 PfExceptionLog exceptionLog = new PfExceptionLog();
 exceptionLog.setEnvironment(environment);
 exceptionLog.setExceptionClass(exceptionClass);
 exceptionLog.setExceptionTime(new Date(reqTime));
 exceptionLog.setResultCode(resultCode);
 exceptionLog.setServiceName(nodeNameEn);
 exceptionLog.setStackTrace(stackTrace);
 pfExceptionLogDao.insert(exceptionLog);
 System.out.println("Exception ex:" + exceptionClass);
 System.out.println("Exception ex:" + resultCode);
 }

 @Override
 public void collectNotify(long reqTime, String exceptionClass, String stackTrace, int resultCode,
 String environment, String nodeNameEn, NotifyLevelEnum level) {
 try{
 this.collect(reqTime, exceptionClass, stackTrace, resultCode, environment, nodeNameEn);
 }catch(Exception ex){
 ex.printStackTrace();
 }
 //FIXME 根据不停的level 分别执行策略 start
 if(level.compareTo(NotifyLevelEnum.WARN) == 0){
 //发邮件!
 }else if(level.compareTo(NotifyLevelEnum.ERROR) == 0){
 //发邮件,1/3概率 发短信!
 }else if(level.compareTo(NotifyLevelEnum.FATAL) == 0){
 //发邮件,并且要发短信!
 }
 //FIXME 根据不停的level 分别执行策略 end
 }


 /**
 * 分页获取异常日志列表
 *
 * @param pageNo 页码
 * @param size 每页数据量
 * @param serviceName
 * @param beginTime
 * @param endTime
 *
 * @return
 */
// @Override
// public PagedPfExceptionLogStruct queryExceptionLogList(int pageNo, int size, String serviceName, Long beginTime, Long endTime) {
// PageHelper.startPage(pageNo , size);
// Date beginTimeDate = beginTime == null || beginTime <= 0 ? null : new Date(beginTime);
// Date endTimeDate = endTime == null || beginTime <= 0 ? null : new Date(endTime);
// List<PfExceptionLog> list = pfExceptionLogDao.selectPfExceptionLogList(serviceName , beginTimeDate , endTimeDate);
//
// List<PfExceptionLogStruct> structList = new ArrayList<>();
// if (list != null || !list.isEmpty()) {
// for (PfExceptionLog pfExceptionLog: list) {
// structList.add(entityToStruct(pfExceptionLog));
// }
// }
// PageInfo<PfExceptionLogStruct> page = new PageInfo<>(structList);
// PagedPfExceptionLogStruct result = new PagedPfExceptionLogStruct(page.getPageNum(), page.getTotal(), page.getPages(), page.getList());
// return result;
// }

 @Override
 public PagedPfExceptionLogStruct queryExceptionLogList(int pageNo, int size, String serviceName, Long beginTime, Long endTime) {
 Date beginTimeDate = beginTime == null || beginTime <= 0 ? null : new Date(beginTime);
 Date endTimeDate = endTime == null || beginTime <= 0 ? null : new Date(endTime);
 int offset = pageNo < 1 ? 0 : (pageNo - 1) * size;
 List<PfExceptionLog> list = pfExceptionLogDao.selectPfExceptionLogList(offset , size , serviceName , beginTimeDate , endTimeDate);
 List<PfExceptionLogStruct> structList = new ArrayList<>();
 if (list != null || !list.isEmpty()) {
 for (PfExceptionLog pfExceptionLog: list) {
 structList.add(entityToStruct(pfExceptionLog));
 }
 }
 int total = pfExceptionLogDao.selectPfExceptionLogListCount(serviceName , beginTimeDate , endTimeDate);
 int pages = total % size == 0 ? total/size : total/size + 1;

 PageInfo<PfExceptionLogStruct> page = new PageInfo<>(structList);
 PagedPfExceptionLogStruct result = new PagedPfExceptionLogStruct(pageNo, total, pages, page.getList());
 return result;
 }

 private PfExceptionLogStruct entityToStruct(PfExceptionLog pfExceptionLog) {
 if (pfExceptionLog == null) {
 return null;
 }
 PfExceptionLogStruct pfExceptionLogStruct = new PfExceptionLogStruct();
 pfExceptionLogStruct.id = pfExceptionLog.getId();
 pfExceptionLogStruct.environment = pfExceptionLog.getEnvironment();
 pfExceptionLogStruct.exceptionClass = pfExceptionLog.getExceptionClass();
 pfExceptionLogStruct.exceptionTime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(pfExceptionLog.getExceptionTime());
 pfExceptionLogStruct.resultCode = pfExceptionLog.getResultCode();
 pfExceptionLogStruct.serviceName = pfExceptionLog.getServiceName();
 pfExceptionLogStruct.stackTrace = pfExceptionLog.getStackTrace();
 return pfExceptionLogStruct;
 }

 /**
 * 根据异常日志id 获取异常日志详情
 *
 * @param id 异常日志id
 *
 * @return
 */
 @Override
 public PfExceptionLogStruct queryExceptionLogById(int id) {
 PfExceptionLog pfExceptionLog = pfExceptionLogDao.selectByPrimaryKey(id);
 return entityToStruct(pfExceptionLog);
 }
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159

MultipleDataSourceAop.java多数据源自动切换切面类

package com.xxx.pubfound.aop;

import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.shangde.gfw.util.proxy.ProxyUtil;
import com.xxx.pubfound.MultipleDataSource;
import com.xxx.pubfound.aop.anntion.DataSourceType;

/***
 * Data-source switching advice (intercepts every method of the classes in
 * com.xxx.pubfound.service).<br>
 * If the executing class carries {@link DataSourceType}, its value is bound
 * as the routing key before the method runs; otherwise the key is cleared so
 * the routing data source falls back to its default target.
 *
 * @ClassName: MultipleDataSourceAop
 * @author youqiang.xiong
 * @version 1.0
 * @since
 */
@Component
@Aspect
public class MultipleDataSourceAop {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    /**
     * Switches the thread-bound data source key before a service method runs.
     *
     * @param joinPoint the intercepted join point (the executing service)
     * @throws Throwable if resolving the proxied target fails
     */
    @Before("execution(* com.xxx.pubfound.service.*.*(..))")
    public void changeDataSource(JoinPoint joinPoint) throws Throwable {

        // Unwrap the AOP proxy so annotations on the actual class are visible.
        Object target = ProxyUtil.getTarget(joinPoint.getTarget());
        if (target.getClass().isAnnotationPresent(DataSourceType.class)) {
            DataSourceType dataSourceType = target.getClass().getAnnotation(DataSourceType.class);
            String type = dataSourceType.value();
            logger.info("数据源切换至--->{}",type);
            MultipleDataSource.setDataSourceKey(type);
        } else {
            // BUG FIX: previously the key was left untouched here, so on a
            // pooled/reused thread a key set by an earlier request kept routing
            // to the wrong data source. A null key makes the router use its
            // defaultTargetDataSource.
            MultipleDataSource.setDataSourceKey(null);
            logger.info("此{}不涉及数据源操作.",target.getClass());
        }

    }
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65

以上就是通过spring aop 切面实现多数据源自动切换的代码和配置,请根据各自实际情况稍作修改.

spring+mybatis 实现多数据源切换

相关推荐