MySQL connection pooling: how connections are released and reclaimed (the connection release mechanism)

#!/bin/python

from sqlalchemy.pool import QueuePool
import threading
import MySQLdb
import time

def creator():
    # DB-API connection factory handed to the pool
    c = MySQLdb.connect(host="192.168.41.76", db="test", user="root", passwd="root", port=3306)
    return c

# at most 2 pooled connections, no overflow; a checkout that cannot be
# satisfied fails after 1 second instead of waiting forever
pool = QueuePool(creator, pool_size=2, max_overflow=0, timeout=1)

class Demo(object):

    def proc(self):
        ident = threading.currentThread().getName()
        conn = pool.connect()  # checkout: returns a fairy wrapping the raw DB-API connection
        print ident, id(conn), id(conn.connection)
        cur = conn.cursor()
        cur.execute("select * from t_user")
        res = cur.fetchall()
        # conn.close()  # deliberately omitted: the fairy going out of scope releases the connection
        time.sleep(2)

    def __del__(self):
        print "__del__"

def target():
    while True:
        try:
            ident = threading.currentThread().getName()
            d = Demo()
            d.proc()
            # conn = pool.connect()
            # print ident, id(conn)
            # cur = conn.cursor()
            # cur.execute("select * from t_user")
            # res = cur.fetchall() 
            # # conn.close()
            # time.sleep(2)
        except Exception, e:
            print ident, e

def main():
    for i in range(3):
        t = threading.Thread(target=target)
        t.setDaemon(True)
        t.start()
    while True:
        try:
            time.sleep(10)
        except:
            import sys,traceback
            print traceback.format_exc()
            sys.exit(1)

if __name__ == "__main__":
    main()

Simulating sqlalchemy connection pool usage with the script above surfaces the following points:

With fewer connections than threads, once the pool is exhausted the remaining threads do not all hang waiting for a connection; the checkout fails almost immediately with a pool-exhausted timeout error.
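
For example, with the pool defined above (pool_size=2, max_overflow=0, timeout=1), a third checkout raises sqlalchemy's TimeoutError after roughly one second instead of blocking. A minimal sketch, reusing that pool object:

from sqlalchemy.exc import TimeoutError

held = [pool.connect(), pool.connect()]   # the pool is now fully checked out
try:
    pool.connect()                        # third checkout, no free connection
except TimeoutError, e:
    print "pool exhausted:", e            # raised after ~1s instead of blocking
finally:
    for c in held:
        c.close()                         # return the connections to the pool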

After a connection has been checked out, if you then add a firewall rule blocking the server's IP, the thread blocks inside execute(); that blocking is a characteristic of the MySQL client, not of the pool.

Even so, the connection can be released back to the pool automatically in this scenario, without ever calling conn.close().
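
A minimal way to see this, reusing the pool above (QueuePool.status() prints the pool's checked-in/checked-out counters): drop the last reference to a checked-out connection without closing it, and it shows up as returned to the pool.

import gc

conn = pool.connect()
print pool.status()   # one connection checked out
del conn              # no conn.close(); just drop the last reference
gc.collect()          # in CPython the refcount drop alone triggers the cleanup; collect() is belt and braces
print pool.status()   # the connection has been checked back in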

If instead you stop using the Demo class and uncomment the block in target(), then without an explicit conn.close() the connection is never released: the conn local lives on in the loop's frame, and because the next pool.connect() is evaluated (and times out) before conn could be rebound, the old connection stays referenced and checked out forever.



The code below strips the mechanism down and shows how conn gets reclaimed automatically: a weak reference whose callback performs the cleanup.

#!/bin/python
# coding=utf8

import weakref
import threading
import time

_refs = set()  # keep the weakref objects alive so their callbacks can fire


class ObjFairy(object):
    """Wraps the pooled resource and related state so the cleanup callback
    has everything it needs (mirrors sqlalchemy's _ConnectionFairy).
    """
    def __init__(self, _pool, _echo):
        self.pool = _pool
        self.echo = _echo

def callback(ref):
    """Weakref callback fired when the fairy is garbage collected; ref is
    the (now dead) weak reference. A real pool would check the underlying
    connection back in here.
    """
    _refs.discard(ref)
    print "gc callback, do clean", ref

class Pool(object):
    """A stand-in for a real pool (e.g. sqlalchemy's QueuePool).
    """

    @staticmethod
    def checkout():
        fairy = ObjFairy(None, None)
        # Hand the fairy itself to the caller and keep only a weak reference
        # to it; once the caller drops its last reference, callback() runs
        # and the resource can be reclaimed automatically.
        _refs.add(weakref.ref(fairy, callback))
        return fairy



# ========= test ========== #
class DemoBusi(object):
    """Business-side class.
    """

    def proc(self):
        current = Pool.checkout()  # check out a reusable object; no explicit cleanup needed
        # when proc() returns, `current` goes out of scope and callback() fires

def target():
    while True:
        demo = DemoBusi()  # simulate a business handler: one new handler per incoming request
        demo.proc()
        # current = Pool.checkout()
        time.sleep(1)

def main():
    for i in range(3):
        t = threading.Thread(target=target)
        t.setDaemon(True)
        t.start()

    while True:
        time.sleep(2 * 60)
        break

if __name__ == "__main__":
    main()



This is exactly the trick sqlalchemy uses. Here is the relevant connection pool source (the checkout classmethod in sqlalchemy's pool module):

@classmethod
def checkout(cls, pool):
    rec = pool._do_get()
    try:
        dbapi_connection = rec.get_connection()
    except:
        with util.safe_reraise():
            rec.checkin()
    echo = pool._should_log_debug()
    fairy = _ConnectionFairy(dbapi_connection, rec, echo)
    rec.fairy_ref = weakref.ref(
        fairy,
        lambda ref: _finalize_fairy and
        _finalize_fairy(
            dbapi_connection,
            rec, pool, ref, echo)
    )
    _refs.add(rec)
    if echo:
        pool.logger.debug("Connection %r checked out from pool",
                          dbapi_connection)
    return fairy


Tracing the PooledDB source in DBUtils.py shows that 3 threads sharing 2 connections can end up with every thread blocked: if a thread blocks while establishing a connection or checking one out, it does so while still holding the pool's re-entrant lock (the Condition's underlying RLock), so no other thread can get past acquire().
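
For reference, a hedged sketch of the kind of setup in which this shows up (the connection parameters mirror the earlier example and are placeholders): three worker threads sharing a PooledDB limited to two connections. The deadlock additionally requires the ingredient described above, i.e. connection establishment or the ping check blocking while the Condition's lock is held.

from DBUtils.PooledDB import PooledDB
import MySQLdb
import threading

db_pool = PooledDB(creator=MySQLdb, maxconnections=2, blocking=True,
                   host="192.168.41.76", db="test",
                   user="root", passwd="root", port=3306)

def worker():
    while True:
        conn = db_pool.connection()   # goes through the Condition inside PooledDB
        cur = conn.cursor()
        cur.execute("select * from t_user")
        cur.fetchall()
        conn.close()                  # hand the connection back to the pool

for i in range(3):
    t = threading.Thread(target=worker)
    t.setDaemon(True)
    t.start()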

The condition-variable code below strips the locking problem down to its essentials:

#coding=utf8

import threading
from threading import Condition
import time

class Obj(object):
    """Shared state: a start timestamp plus the Condition whose lock every
    worker holds while 'using' the resource.
    """

    def __init__(self):
        self.bt = time.time()
        self.cond = Condition()

obj = Obj()

class Demo(object):
    def __init__(self):
        pass

    def proc(self):
        et = time.time()
        if et - obj.bt > 6:
            # after 6 seconds, simulate connection setup that blocks forever
            # (the firewall scenario) while the Condition lock is still held
            print "block(%s)" % threading.currentThread().getName()
            raw_input()
        time.sleep(1)
        print "proc(%s)" % threading.currentThread().getName()

def f():
    while 1:
        print "----acquire(%s)" % threading.currentThread().getName()
        obj.cond.acquire()   # every worker serializes on the same lock
        Demo().proc()        # may block (raw_input) with the lock still held
        obj.cond.release()   # never reached by the blocked thread
        time.sleep(0.1)
        print "-----release(%s)" % threading.currentThread().getName()
        

def main():
    for i in range(3):
        t = threading.Thread(target=f)
        t.setDaemon(True)
        time.sleep(2)
        t.start()
    time.sleep(10000)

if __name__ == "__main__":
    main()


And this is the relevant DBUtils source: the connection checkout guarded by the Condition.

self._condition.acquire()
try:
    while (self._maxconnections
            and self._connections >= self._maxconnections):
        self._condition.wait()
    # connection limit not reached, get a dedicated connection
    try:  # first try to get it from the idle cache
        con = self._idle_cache.pop(0)
    except IndexError:  # else get a fresh connection
        con = self.steady_connection()
    else:
        con._ping_check()  # check connection
    con = PooledDedicatedDBConnection(self, con)
    self._connections += 1
finally:
    self._condition.release()
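
A minimal sketch of the usual way out, applied to the f() simulation above rather than to DBUtils itself: hold the Condition only while touching shared bookkeeping, and do the potentially blocking work outside the lock, so one stuck thread no longer stalls the others at acquire().

def f_fixed():
    while 1:
        obj.cond.acquire()
        try:
            pass                  # only shared-state bookkeeping under the lock
        finally:
            obj.cond.release()
        Demo().proc()             # the blocking work now runs without the lock held
        time.sleep(0.1)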

