直接原因是 tcp 连接被重置(Connection reset)。分析: 很有可能是 druid 连接池提供了一个已经失效的连接。但按理说, 如果连接已经失效, 应该会换用下一个连接。源代码: {
} catch (IOException ioEx) { throw SQLError.createCommunicationsException(this.connection, this.lastPacketSentTimeMs, this.lastPacketReceivedTimeMs, ioEx, getExceptionInterceptor());
Caused by: com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure The last packet successfully received from the server was 4,861 milliseconds ago. The last packet sent successfully to the server was 4,799 milliseconds ago. at sun.reflect.GeneratedConstructorAccessor280.newInstance(Unknown Source) at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) at java.lang.reflect.Constructor.newInstance(Constructor.java:526) at com.mysql.jdbc.Util.handleNewInstance(Util.java:411) at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:1121) at com.mysql.jdbc.MysqlIO.reuseAndReadPacket(MysqlIO.java:3670) at com.mysql.jdbc.MysqlIO.reuseAndReadPacket(MysqlIO.java:3559) at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:4110) at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:2570) at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2731) at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2815) at com.mysql.jdbc.PreparedStatement.executeInternal(PreparedStatement.java:2155) at com.mysql.jdbc.PreparedStatement.execute(PreparedStatement.java:1379) at com.alibaba.druid.filter.FilterChainImpl.preparedStatement_execute(FilterChainImpl.java:2931) at com.alibaba.druid.filter.FilterEventAdapter.preparedStatement_execute(FilterEventAdapter.java:440) at com.alibaba.druid.filter.FilterChainImpl.preparedStatement_execute(FilterChainImpl.java:2929) at com.alibaba.druid.filter.FilterEventAdapter.preparedStatement_execute(FilterEventAdapter.java:440) at com.alibaba.druid.filter.FilterChainImpl.preparedStatement_execute(FilterChainImpl.java:2929) at com.alibaba.druid.proxy.jdbc.PreparedStatementProxyImpl.execute(PreparedStatementProxyImpl.java:118) at com.alibaba.druid.pool.DruidPooledPreparedStatement.execute(DruidPooledPreparedStatement.java:493) at org.apache.ibatis.executor.statement.PreparedStatementHandler.query(PreparedStatementHandler.java:56) at 
org.apache.ibatis.executor.statement.RoutingStatementHandler.query(RoutingStatementHandler.java:70) at org.apache.ibatis.executor.SimpleExecutor.doQuery(SimpleExecutor.java:57) at org.apache.ibatis.executor.BaseExecutor.queryFromDatabase(BaseExecutor.java:259) at org.apache.ibatis.executor.BaseExecutor.query(BaseExecutor.java:132) at org.apache.ibatis.executor.CachingExecutor.query(CachingExecutor.java:105) at org.apache.ibatis.executor.CachingExecutor.query(CachingExecutor.java:81) at sun.reflect.GeneratedMethodAccessor250.invoke(Unknown Source) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:606) at org.apache.ibatis.plugin.Invocation.proceed(Invocation.java:46) at org.apache.ibatis.plugin.Plugin.invoke(Plugin.java:57) at com.sun.proxy.$Proxy54.query(Unknown Source) at sun.reflect.GeneratedMethodAccessor250.invoke(Unknown Source) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:606) at org.apache.ibatis.plugin.Invocation.proceed(Invocation.java:46) at org.apache.ibatis.plugin.Plugin.invoke(Plugin.java:57) at com.sun.proxy.$Proxy54.query(Unknown Source) at org.apache.ibatis.session.defaults.DefaultSqlSession.selectList(DefaultSqlSession.java:104) at org.apache.ibatis.session.defaults.DefaultSqlSession.selectList(DefaultSqlSession.java:98) at org.apache.ibatis.session.defaults.DefaultSqlSession.selectOne(DefaultSqlSession.java:62) at sun.reflect.GeneratedMethodAccessor253.invoke(Unknown Source) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:606) at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:354) ... 
43 more Caused by: java.net.SocketException: Connection reset at java.net.SocketInputStream.read(SocketInputStream.java:196) at java.net.SocketInputStream.read(SocketInputStream.java:122) at com.mysql.jdbc.util.ReadAheadInputStream.fill(ReadAheadInputStream.java:114) at com.mysql.jdbc.util.ReadAheadInputStream.readFromUnderlyingStreamIfNecessary(ReadAheadInputStream.java:161) at com.mysql.jdbc.util.ReadAheadInputStream.read(ReadAheadInputStream.java:189) at com.mysql.jdbc.MysqlIO.readFully(MysqlIO.java:3116) at com.mysql.jdbc.MysqlIO.reuseAndReadPacket(MysqlIO.java:3570) ... 87 more
文章1:
http://www.jb51.net/article/34888.htm
很大的一个原因除了 服务端断开连接外, 还有就是 业务本身请求的连接太多了.
文章2: Connection reset的一种 case. 对方已关闭.
* jdbc pool : http://tomcat.apache.org/tomcat-7.0-doc/jdbc-pool.html (The JDBC Connection Pool org.apache.tomcat.jdbc.pool is a replacement or an alternative to the Apache Commons DBCP connection pool.)
* druid的中文配置说明 (配置项和dbcp是一致的),不如英文来的明了: https://github.com/alibaba/druid/wiki/DruidDataSource配置属性列表
jdbc连接池连接过多且又空闲不使用就需要进行回收。这个不管是数据库连接池还是java线程池都有一样的诉求。
jdbc连接池不同于线程池的是,连接有可能提前被数据库关闭掉。这个将导致后续的请求使用该连接的时候抛错或者 通过重连导致超时。
故为解决该问题需要如下配置。
<property name="testWhileIdle" value="true" />
<property name="timeBetweenEvictionRunsMillis" value="${jdbc.idleConnectionTestPeriod}" />
<property name="minEvictableIdleTimeMillis" value="${jdbc.maxIdleTime}" />
我们系统中目前设置的是jdbc.maxIdleTime=14400000 ,minIdle=20
14400000/3600/1000 = 4小时
我们的数据库目前设置为3分钟即断开。
故客户端连接池设置时间短一点:2.9分钟,会回收连接直到20.剩下的20个连接即使一直空闲也不回收,符合连接池的意义。但是数据库会把这20个连接关闭。 故每次获取连接时会发现连接失效! 依旧无法解决
抛错或者超时的问题。怎么办?把 testWhileIdle 打开后,每次从连接池里获取连接时,发现空闲时间超过了
idleConnectionTestPeriod,就会进行检测.
if (isTestWhileIdle()) {
final long currentTimeMillis = System.currentTimeMillis();
final long lastActiveTimeMillis = poolableConnection.getConnectionHolder().getLastActiveTimeMillis();
final long idleMillis = currentTimeMillis - lastActiveTimeMillis;
long timeBetweenEvictionRunsMillis = this.getTimeBetweenEvictionRunsMillis();
if (timeBetweenEvictionRunsMillis <= 0) {
timeBetweenEvictionRunsMillis = DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS;
}
if (idleMillis >= timeBetweenEvictionRunsMillis) {
boolean validate = testConnectionInternal(poolableConnection.getConnection());
if (!validate) {
if (LOG.isDebugEnabled()) {
LOG.debug("skip not validate connection.");
}
discardConnection(realConnection);
continue;
}
}
}
只能查看druid的监控,查看峰值活跃数。不停调整值
{ "url": "jdbc:mysql://xxxx?characterEncoding=UTF-8", "dbType": "mysql", "name": "DataSource-4129454", "activeCount": 0, "activePeak": 4, "activePeakTime": "2016-02-16 00:00:03", "poolingCount": 5, "poolingPeak": 6, "poolingPeakTime": "2016-02-15 23:59:25", "connectCount": 8788, "closeCount": 8787, "executeCount": 9757, "commitCount": 485, "pstmtCacheHitCount": 9753, "pstmtCacheMissCount": 4, "startTransactionCount": 485, "transactionHistogram": [ 0, 485 ], "connectionHoldTimeHistogram": [ 8274, 513 ], ]}
{ "url": "jdbc:mysql://xxx?characterEncoding=UTF-8", "dbType": "mysql", "name": "DataSource-4129454", "activeCount": 0, "activePeak": 4, "activePeakTime": "2016-02-15 22:00:10", "poolingCount": 4, "poolingPeak": 4, "poolingPeakTime": "2016-02-15 21:59:13", "connectCount": 9354, "closeCount": 9354, "executeCount": 9962, "errorCount": 1, "commitCount": 305, "pstmtCacheHitCount": 9956, "pstmtCacheMissCount": 6, "startTransactionCount": 305, "transactionHistogram": [ 0, 305 ], "connectionHoldTimeHistogram": [ 8999, 353, 2 ]}