import pandas as pd
from sqlalchemy import create_engine

engine = create_engine('sqlite:///:memory:')

# write an illustrative DataFrame to the database, then read it back
data = pd.DataFrame({'Col_1': ['a', 'b'], 'Col_2': [1.5, 2.5]})
data.to_sql('data', engine)

with engine.connect() as conn, conn.begin():
    data = pd.read_sql_table('data', conn)
If the data is large, you can write it in chunks:
data.to_sql('data_chunked', engine, chunksize=1000)
To specify column data types:
from sqlalchemy.types import String
data.to_sql('data_dtype', engine, dtype={'Col_1': String})
# Alternative to_sql() *method* for DBs that support COPY FROM
import csv
from io import StringIO
def psql_insert_copy(table, conn, keys, data_iter):
    # gets a DBAPI connection that can provide a cursor
    dbapi_conn = conn.connection
    with dbapi_conn.cursor() as cur:
        # dump the incoming rows into an in-memory CSV buffer
        s_buf = StringIO()
        writer = csv.writer(s_buf)
        writer.writerows(data_iter)
        s_buf.seek(0)

        columns = ', '.join('"{}"'.format(k) for k in keys)
        if table.schema:
            table_name = '{}.{}'.format(table.schema, table.name)
        else:
            table_name = table.name

        # stream the buffer into the table using PostgreSQL's COPY
        sql = 'COPY {} ({}) FROM STDIN WITH CSV'.format(
            table_name, columns)
        cur.copy_expert(sql=sql, file=s_buf)
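To use this callable, pass it to to_sql() via the method argument. A minimal sketch, assuming a PostgreSQL database reachable through psycopg2 (the pg_engine URL below is a placeholder):
# COPY FROM is PostgreSQL-specific; copy_expert() comes from psycopg2
pg_engine = create_engine('postgresql+psycopg2://scott:tiger@localhost:5432/mydatabase')
data.to_sql('data_copy', pg_engine, method=psql_insert_copy)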
pd.read_sql_table('data', engine, parse_dates=['Date'])
pd.read_sql_table('data', engine, parse_dates={'Date': '%Y-%m-%d'})
pd.read_sql_table('data', engine, parse_dates={'Date': {'format': '%Y-%m-%d %H:%M:%S'}})
pd.read_sql_query('SELECT * FROM data', engine)
pd.read_sql_query("SELECT id, Col_1, Col_2 FROM data WHERE id = 42;", engine)
for chunk in pd.read_sql_query("SELECT * FROM data_chunked", engine, chunksize=5):
    print(chunk)
from pandas.io import sql
sql.execute('SELECT * FROM table_name', engine)
sql.execute('INSERT INTO table_name VALUES(?, ?, ?, ?)', engine,
            params=[('id', 1, 12.2, True)])
from sqlalchemy import create_engine
engine = create_engine('postgresql://scott:tiger@localhost:5432/mydatabase')
engine = create_engine('mysql+mysqldb://scott:tiger@localhost/foo')
engine = create_engine('oracle://scott:tiger@127.0.0.1:1521/sidname')
engine = create_engine('mssql+pyodbc://mydsn')
# sqlite://<nohostname>/<path>
# where <path> is relative:
engine = create_engine('sqlite:///foo.db')
# or absolute, starting with a slash:
engine = create_engine('sqlite:////absolute/path/to/foo.db')