Writing data into MySQL from multiple Python threads

The project needed to simulate a data set of a certain scale in order to test the query performance of the relevant query interfaces. The database is MySQL, and below is the simple script I originally wrote.

# coding=utf-8
import pymysql

def sfzh_sql_insert():
    conn = pymysql.connect(host='172.31.100.11', user='root', password="root",
                           database='fk_test_db', port=3306,
                           charset='utf8')
    cus = conn.cursor()
    try:
        for i in range(100000):
            # let the driver quote the value instead of formatting the SQL by hand
            sql = "INSERT INTO `fk_test_db`.`test_table` VALUES (%s)"
            ok = cus.execute(sql, (i,))
            conn.commit()   # one commit (and one round trip) per row
            print(ok)
    except Exception as e:
        print("one error happened:", e)
    finally:
        cus.close()
        conn.close()

Later I found the script was too slow: inserting 100,000 rows took about half an hour, so I decided to rework it with multiple threads.

# coding=utf-8
import pymysql
import threading

def sfzh_sql_insert(thread_id):
    # each thread opens its own connection; a pymysql connection
    # must not be shared across threads
    conn = pymysql.connect(host='172.31.100.11', user='root', password="root",
                           database='fk_test_db', port=3306,
                           charset='utf8')
    cus = conn.cursor()
    try:
        # each thread inserts its own block of 1000 values, so the
        # 100 threads together cover 0..99999 without overlapping
        for i in range(thread_id * 1000, (thread_id + 1) * 1000):
            sql = "INSERT INTO `fk_test_db`.`test_table` VALUES (%s)"
            ok = cus.execute(sql, (i,))
            conn.commit()
            print(ok)
    except Exception as e:
        print("one error happened:", e)
    finally:
        cus.close()
        conn.close()


class myThread(threading.Thread):
    def __init__(self, id):
        threading.Thread.__init__(self)
        self.id = id

    def run(self):
        # pass the thread's own id, not the built-in id()
        sfzh_sql_insert(self.id)


threads = []
for i in range(100):
    thread = myThread(i)
    threads.append(thread)

for t in threads:
    t.start()

for t in threads:
    t.join()

After the rework, with 100 threads running, the same 100,000 inserts finished in about one minute.
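For reference, most of the time in the original script goes into committing (and paying a network round trip) for every single row. Batching the inserts in a single thread is another way to speed this up; below is a minimal sketch, assuming the same table and connection parameters as above (batch_insert and its parameters are hypothetical names, not part of the original script), which uses pymysql's executemany and commits once per batch of 1000 rows.

# coding=utf-8
import pymysql

def batch_insert(total=100000, batch_size=1000):
    # hypothetical helper for comparison, not part of the original script
    conn = pymysql.connect(host='172.31.100.11', user='root', password="root",
                           database='fk_test_db', port=3306,
                           charset='utf8')
    cus = conn.cursor()
    sql = "INSERT INTO `fk_test_db`.`test_table` VALUES (%s)"
    try:
        for start in range(0, total, batch_size):
            rows = [(i,) for i in range(start, start + batch_size)]
            cus.executemany(sql, rows)   # one multi-row statement per batch
            conn.commit()                # one commit per batch instead of per row
    finally:
        cus.close()
        conn.close()

if __name__ == '__main__':
    batch_insert()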
