JDBC batch insert: combine multiple rows from a CSV file into a single SQL statement.

I wrote a small utility class that I may need for future work, and I'm recording it here on the blog.

The idea is to read a CSV file and build the SQL by string concatenation, combining the rows to be inserted into a single statement before executing it. This is much faster than executing one statement per row.
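
For illustration, with the users(username,pwd) table used by the code below and a batch of three rows, the statement the tool builds would look roughly like this (a hand-written sketch, not actual output; conn is assumed to be an open java.sql.Connection with auto-commit disabled, and the values are made up):

PreparedStatement pstm = conn.prepareStatement(
        "INSERT INTO users(username,pwd) VALUES (?,?),(?,?),(?,?)");
pstm.setString(1, "u1"); pstm.setString(2, "p1"); // row 1 (sample values)
pstm.setString(3, "u2"); pstm.setString(4, "p2"); // row 2 (sample values)
pstm.setString(5, "u3"); pstm.setString(6, "p3"); // row 3 (sample values)
pstm.execute();  // one round trip inserts all three rows
conn.commit();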

In a quick test with 100,000 rows of simple test data, the traditional PreparedStatement addBatch() approach, executing the batch every 1,000 rows, took about 7 seconds.

With the concatenation approach, combining 1,000 rows into one SQL statement, the same 100,000 rows took a little over 1 second.
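
For reference, the addBatch() baseline being compared looks roughly like this. This is a minimal sketch, not the exact code that was timed: it assumes the same users(username,pwd) table, an open Connection conn with auto-commit disabled, and a hypothetical in-memory list rows of String[]{username, pwd} pairs.

PreparedStatement pstm = conn.prepareStatement(
        "INSERT INTO users(username,pwd) VALUES (?,?)");
int count = 0;
for (String[] row : rows) {        // rows: hypothetical list of [username, pwd]
    pstm.setString(1, row[0]);
    pstm.setString(2, row[1]);
    pstm.addBatch();
    if (++count % 1000 == 0) {
        pstm.executeBatch();       // send 1,000 single-row inserts at once
        conn.commit();
    }
}
pstm.executeBatch();               // send whatever is left
conn.commit();
pstm.close();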

The utility's parameters are fairly hard-coded and will need adjusting for real use, and there is still plenty of room for optimization. Corrections are welcome.

package db;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

/**
 * Builds multi-row INSERT statements from a CSV file and executes them against
 * the database.
 * 
 * @author cuijk
 * @date 2014-12-8
 * 
 */
public class SqlUtil {

    private static String driver = "com.mysql.jdbc.Driver";
    private static String url = "jdbc:mysql://localhost:3306/test?autoReconnect=true&useUnicode=true&characterEncoding=utf-8";
    private static String user = "root";
    private static String pwd = "root";
    private static String filePath = "d://test.csv";
    // Character encoding used when reading the CSV file
    private static String charset = "UTF-8";
    // SQL prefix: the INSERT ... VALUES part
    private static String preSql = "INSERT INTO users(username,pwd) VALUES ";
    // SQL suffix: one placeholder group per row, including the trailing ','
    private static String sufSql = "(?,?),";

    public static void main(String[] args) {
        int i = insert(driver, url, user, pwd, preSql, sufSql, filePath,
                charset);
        System.out.println(i);
    }

    /**
     * Reads the CSV file and inserts its contents, 1000 rows per multi-row
     * INSERT statement.
     * 
     * @param driver
     *            JDBC driver class
     *            <p>
     *            com.mysql.jdbc.Driver
     * @param url
     *            database URL
     *            <p>
     *            jdbc:mysql://localhost:3306/test
     * @param user
     *            database user
     * @param pwd
     *            database password
     * @param preSql
     *            SQL prefix
     *            <p>
     *            INSERT INTO users(username,pwd) VALUES
     * @param sufSql
     *            SQL suffix, one placeholder group per row
     *            <p>
     *            (?,?),
     * @param filePath
     *            path of the CSV file
     *            <p>
     *            d://test.csv
     * @param charset
     *            encoding used when reading the file
     *            <p>
     *            UTF-8
     * @return number of lines processed, or 0 if an error occurred
     * 
     */
    public static int insert(String driver, String url, String user,
            String pwd, String preSql, String sufSql, String filePath,
            String charset) {

        long start = System.currentTimeMillis();
        StringBuilder sqlSb = null;
        Connection conn = null;
        BufferedReader br = null;
        InputStreamReader isr = null;
        PreparedStatement pstm = null;
        int count = 0;
        int total = 0;
        try {
            String line = "";
            isr = new InputStreamReader(
                    new FileInputStream(new File(filePath)), charset);
            br = new BufferedReader(isr);
            // Get a connection with auto-commit disabled
            conn = getConnection(driver, url, user, pwd);
            sqlSb = new StringBuilder(preSql);

            // Column values for the current batch: list1 holds the first CSV
            // field, list2 the second
            List<String> list1 = new ArrayList<String>();
            List<String> list2 = new ArrayList<String>();

            while ((line = br.readLine()) != null) {
                count++;
                total++;
                sqlSb.append(sufSql);

                // Split on the first comma: the part before it is column 1,
                // the rest is column 2
                list1.add(line.substring(0, line.indexOf(",")));
                list2.add(line.substring(line.indexOf(",") + 1));

                if (count >= 1000) {
                    // Remove the trailing ','
                    sqlSb.delete(sqlSb.length() - 1, sqlSb.length());
                    pstm = conn.prepareStatement(sqlSb.toString());

                    // Fill in the placeholders
                    for (int i = 0; i < count; i++) {
                        pstm.setString(i * 2 + 1, list1.get(i));
                        pstm.setString(i * 2 + 2, list2.get(i));
                    }

                    pstm.execute(); // execute one multi-row statement
                    conn.commit();
                    pstm.close();
                    System.out.println("Processed " + count + " records.");

                    // Reset the builder and buffers after every 1000 rows
                    sqlSb = new StringBuilder(preSql);

                    count = 0;
                    list1.clear();
                    list2.clear();
                }
            }
            if (count > 0) {
                // Rebuild the SQL for the remaining rows
                sqlSb = new StringBuilder(preSql);

                for (int i = 0; i < count; i++) {
                    sqlSb.append(sufSql);
                }
                // Remove the trailing ','
                sqlSb.delete(sqlSb.length() - 1, sqlSb.length());

                pstm = conn.prepareStatement(sqlSb.toString());

                for (int i = 0; i < count; i++) {
                    pstm.setString(i * 2 + 1, list1.get(i));
                    pstm.setString(i * 2 + 2, list2.get(i));
                }

                pstm.execute();
                conn.commit();
                pstm.close();
                System.out.println("Processed " + count + " records.");
            }
            list1.clear();
            list2.clear();

            double time = ((double) (System.currentTimeMillis() - start)) / 1000;
            System.out.println("Done: " + total + " records processed in " + time + " seconds.");
            return total;
        } catch (InstantiationException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } catch (SQLException e) {
            e.printStackTrace();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if (br != null) {
                    br.close();
                }
                if (isr != null) {
                    isr.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
            // Also close the statement; otherwise it leaks on error paths
            try {
                if (pstm != null) {
                    pstm.close();
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
            closeConn(conn);
        }
        return 0;
    }

    /**
     * Gets a connection and calls setAutoCommit(false) so that commits are issued manually.
     * 
     * @param dbDriver
     * @param dbUrl
     * @param dbUser
     * @param dbPwd
     * @return
     * @throws InstantiationException
     * @throws IllegalAccessException
     * @throws ClassNotFoundException
     * @throws SQLException
     */
    private static Connection getConnection(String dbDriver, String dbUrl,
            String dbUser, String dbPwd) throws InstantiationException,
            IllegalAccessException, ClassNotFoundException, SQLException {

        Class.forName(dbDriver).newInstance();
        Connection conn = DriverManager.getConnection(dbUrl, dbUser, dbPwd);
        conn.setAutoCommit(false);
        return conn;

    }

    /**
     * Closes the connection.
     * 
     * @param conn
     */
    private static void closeConn(Connection conn) {

        try {
            if (conn != null) {
                conn.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println("连接未正常关闭");
        }

    }

}
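
Note that insert() splits each line on the first comma only, so the CSV file is expected to contain exactly two fields per line with no quoting; for example (sample values), a line such as user1,123456 becomes username=user1 and pwd=123456.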


Also attached is a simple implementation for writing the CSV file:


package io;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.List;

public class CSVUtil {

    public static void main(String[] args) {
        int count = 100054;
        File file = new File("d://test.csv");
        long s = System.currentTimeMillis();

        List<String> dataList = new ArrayList<String>();
        for (int i = 0; i < count; i++) {
            // Sample row: two comma-separated fields containing non-ASCII data
            dataList.add("测试一1,测试二2");
        }

        exportCsv(file, dataList);
        dataList.clear();

        long e = System.currentTimeMillis();
        System.out.println((e - s) / 1000.0);
    }

    public static boolean exportCsv(File file, List<String> dataList) {
        boolean isSuccess = false;
        FileOutputStream out = null;
        OutputStreamWriter osw = null;
        BufferedWriter bw = null;
        try {
            out = new FileOutputStream(file);
            osw = new OutputStreamWriter(out, "UTF-8");
            bw = new BufferedWriter(osw);
            if (dataList != null && !dataList.isEmpty()) {
                // Write the UTF-8 BOM once at the start of the file
                bw.write('\uFEFF');
                for (String data : dataList) {
                    bw.write(data);
                    bw.newLine();
                }
                bw.flush();
            }
            isSuccess = true;
        } catch (Exception e) {
            e.printStackTrace();
            isSuccess = false;
        } finally {
            if (bw != null) {
                try {
                    bw.close();
                    bw = null;
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if (osw != null) {
                try {
                    osw.close();
                    osw = null;
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if (out != null) {
                try {
                    out.close();
                    out = null;
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return isSuccess;
    }

    public static List<String> importCsv(File file) {
        List<String> dataList = new ArrayList<String>();
        BufferedReader br = null;
        try {
            // Note: FileReader uses the platform default encoding
            br = new BufferedReader(new FileReader(file));
            String line = "";
            while ((line = br.readLine()) != null) {
                dataList.add(line);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (br != null) {
                try {
                    br.close();
                    br = null;
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return dataList;
    }
}

