python数据批量插入postgreSQL数据库

1、executemany()方法批量插入数据到数据库

import pandas as pd
import psycopg2
from DBUtils.PooledDB import PooledDB
import math
'''
批量插入 
data:为dataframe数据,size:为批量大小
sql示例:  "insert into table(username,password,userid) values(%s,%s,%s)"
'''
def batchInsert(sql, data, size):
    try:
        psycopg_pool = PooledDB(psycopg2, mincached=5, blocking=True, user='postgres',
                                password='postgres', database='postgres', host='xxx.xxx.xx.xx',
                                port='5432')
        connection = psycopg_pool.connection()
        cursor = connection.cursor()
    except Exception as e:
            print(e)
   try:
       cycles = math.ceil(data.shape[0] / size)
       for i in range(cycles):
           val = data[i * size:(i + 1) * size].values
           cursor.executemany(sql, val)
           connection.commit()
   except Exception as e:
       print(e)
       connection.rollback()
   finally:
       connection.close()

def insert(data, table_name, size):##data is a DataFrame
    try:
        table_columns = str(list(pd.DataFrame(data.columns).astype(str)[0]))[1:-1].replace("'", "")##获取字段名 
        table_values = '%s'
        for i in range(0, len(data.columns) - 1):
            table_values = table_values + ',' + '%s'
        sql = "insert into {}({}) values ({})".format(table_name, table_columns, table_values)
        batchInsert(sql, data, size)
    except Exception as e:
        print(e)

2、dataframe的to_sql()插入数据到数据库

from sqlalchemy import create_engine
result = pd.DataFrame(data)
engine = create_engine('postgresql://user:password@host:port/database')
pd.io.sql.to_sql(result, table_name, engine, index = False, if_exists='append')#增量入库
pd.io.sql.to_sql(result, table_name, engine, index = False, if_exists='replace')#覆盖入库

3、强大的copy_from(),它是psycopg2对PostgreSQL内置COPY命令的封装

import psycopg2
from sqlalchemy import create_engine
import pandas as pd
from io import StringIO

# Wrap `data` in a DataFrame (source shape not shown here — TODO confirm).
data1 = pd.DataFrame(data)
# Serialize the DataFrame into an in-memory tab-separated string buffer;
# COPY's default text format expects tab-delimited columns with no header.
output = StringIO()
data1.to_csv(output, sep='\t', index=False, header=False)
output1 = output.getvalue()
  
# NOTE(review): the *** placeholders must be replaced with real, quoted
# connection parameters before this snippet is valid Python.
conn = psycopg2.connect(host=***, user=***, password=***, database=***)
cur = conn.cursor()
# copy_from streams the buffer through PostgreSQL's COPY protocol —
# much faster than per-row INSERTs.
cur.copy_from(StringIO(output1), table_name1)
conn.commit()
cur.close()
conn.close()
print('done')

第一种方法比第二种方法快一些,用copy_from()方法,相比于前两种方法执行效率更高,但是存在一个问题,就是对csv的格式要求,我的csv里面字段数据比较复杂,带有多种符号,用copy_from()方法报错psycopg2.DataError: missing data for column “xxx”,可以参考这里了解对csv格式要求

你可能感兴趣的:(postgres,python,python,postgres)