To write a String into a CLOB column, the String first has to be wrapped in a Reader and then passed to setCharacterStream. In the example below, the String longStr is converted to a byte[], wrapped in a ByteArrayInputStream, and finally in an InputStreamReader.
Inserting or updating CLOB data looks like this (using an update as the example):
PreparedStatement pstmt = conn.prepareStatement("update tablename set column1=? " + whereClause);
byte[] bytes_zyjs = longStr.getBytes();
ByteArrayInputStream bais = new ByteArrayInputStream(bytes_zyjs);
InputStreamReader reader = new InputStreamReader(bais);
pstmt.setCharacterStream(1, reader, bytes_zyjs.length);
pstmt.executeUpdate();
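For what it's worth, the detour through byte[] and InputStreamReader is not strictly required: a java.io.StringReader can wrap the String directly. A minimal sketch (variable names are illustrative):

// Sketch: wrap the String in a StringReader instead of byte[] -> InputStreamReader
Reader reader = new StringReader(longStr);
pstmt.setCharacterStream(1, reader, longStr.length()); // length is in characters here
pstmt.executeUpdate();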
However, writing Chinese characters this way produced garbled text (乱码). From the material I found, the approach above is mostly used with Oracle; with MySQL, setBinaryStream is used instead, which only needs the parameter index, an InputStream, and the length. Example:
byte[] cert_dataBytes = cert_data.getBytes();
ByteArrayInputStream bais1 = new ByteArrayInputStream(cert_dataBytes);
byte[] prikey_dataBytes = prikey_data.getBytes();
ByteArrayInputStream bais2 = new ByteArrayInputStream(prikey_dataBytes);
String sql = "insert into cert_data values(?,?,?)";
PreparedStatement pstm = null;
try {
    conn.setAutoCommit(false);
    pstm = conn.prepareStatement(sql);
    pstm.setInt(1, cert_sn);
    pstm.setBinaryStream(2, bais1, cert_dataBytes.length); // writing via a binary stream stores Chinese characters correctly; other approaches tend to produce garbled text
    pstm.setBinaryStream(3, bais2, prikey_dataBytes.length);
    pstm.executeUpdate();
    conn.commit();
    conn.setAutoCommit(true);
} catch (SQLException e) {
    e.printStackTrace();
} finally {
    try {
        if (pstm != null)
            pstm.close();
    } catch (SQLException e) {
        e.printStackTrace();
    }
}
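One more note on the garbled-text problem: getBytes() with no argument uses the platform default charset, so the bytes that end up in the BLOB depend on the JVM's settings. A variant of the write above with the charset made explicit (a sketch; the connection URL is an assumption to adjust for your environment):

// Sketch: fix the charset explicitly so the stored bytes do not depend on the JVM default
// (StandardCharsets requires Java 7+; on older versions use getBytes("UTF-8"))
byte[] cert_dataBytes = cert_data.getBytes(java.nio.charset.StandardCharsets.UTF_8);
ByteArrayInputStream bais1 = new ByteArrayInputStream(cert_dataBytes);
pstm.setBinaryStream(2, bais1, cert_dataBytes.length);
// read back with the same charset: new String(bytes, java.nio.charset.StandardCharsets.UTF_8)
// a matching MySQL connection URL (assumption, adjust to your setup):
// jdbc:mysql://localhost:3306/yourdb?useUnicode=true&characterEncoding=UTF-8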
//After reading a Blob from the database, convert it back to a String: get its InputStream, read the InputStream into a byte[], then build the String from the bytes. As follows:
//Convert a database BLOB value into a String
public String convertBlobToString(Blob blob) {
    String result = "";
    try {
        // read the Blob's binary stream fully instead of casting it to a driver-specific stream class
        InputStream in = blob.getBinaryStream();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] buffer = new byte[4096];
        int len;
        while ((len = in.read(buffer)) != -1) {
            out.write(buffer, 0, len);
        }
        in.close();
        result = new String(out.toByteArray()); // platform default charset; pass an explicit charset if needed
    } catch (SQLException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    return result;
}
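A minimal usage sketch; the column names are assumed to match the earlier insert example and are placeholders:

// Sketch: convert a BLOB column from a query result back to text
ResultSet rs = pstm.executeQuery("select cert_data from cert_data where cert_sn = 1");
if (rs.next()) {
    String text = convertBlobToString(rs.getBlob("cert_data"));
    System.out.println(text);
}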
2. Batch-inserting entities that contain a BLOB field
public void insertBatchList(ArrayList list) {
    Connection conn = null;
    PreparedStatement stmt = null;
    MetadataMapVo vo = null;
    try {
        conn = DAOUtils.getDBConnection(JNDINames.CRM_DATASOURCE, this);
        stmt = conn.prepareStatement(DAOSQLUtils.getFilterSQL(SQL_INSERT));
        if (list != null && list.size() > 0) {
            for (int i = 0; i < list.size(); i++) {
                vo = (MetadataMapVo) list.get(i);
                int index = 1;
                stmt.setString(index++, vo.getId());
                stmt.setString(index++, vo.getInterc_type());
                stmt.setString(index++, vo.getInterc_metadata_id());
                stmt.setString(index++, vo.getSource_type());
                stmt.setString(index++, vo.getSource_id());
                stmt.setString(index++, vo.getTarget_type());
                stmt.setString(index++, vo.getTarget_id());
                String desc = vo.getDescription();
                if (!StringUtil.isEmptyOrNull(desc)) {
                    // the description column is a BLOB, so write it as a binary stream
                    byte[] descBytes = desc.getBytes();
                    ByteArrayInputStream is = new ByteArrayInputStream(descBytes);
                    stmt.setBinaryStream(index++, is, descBytes.length);
                } else {
                    stmt.setString(index++, null);
                }
                stmt.setString(index++, vo.getRequeir_file_path());
                stmt.setString(index++, vo.getStatus());
                stmt.addBatch();
            }
            stmt.executeBatch();
        }
    } catch (SQLException se) {
        Debug.print(SQL_INSERT, this);
        throw new DAOSystemException("SQLException while insert sql:\n" + SQL_INSERT, se);
    } finally {
        DAOUtils.closeStatement(stmt, this);
        DAOUtils.closeConnection(conn, this);
    }
}
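SQL_INSERT itself is not shown above; for the ten parameters bound in the loop it would look roughly like the sketch below (the table and column names are assumptions taken from the getters). For the empty-description branch, setNull with java.sql.Types.BLOB is an alternative that some drivers handle more predictably than setString(index, null):

// Assumed shape of SQL_INSERT (ten placeholders matching the bindings above)
private static final String SQL_INSERT =
    "insert into metadata_map (id, interc_type, interc_metadata_id, source_type, source_id, "
    + "target_type, target_id, description, requeir_file_path, status) "
    + "values (?,?,?,?,?,?,?,?,?,?)";

// Alternative for the null branch (sketch):
// stmt.setNull(index++, java.sql.Types.BLOB);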
3. Querying entities that contain a BLOB field
private String convertBLOBtoString(Object blobContent) {
    try {
        // getBinaryStream() is invoked via reflection; the argument is expected to be a java.sql.Blob
        // (or a driver object exposing the same method)
        Class clazz = blobContent.getClass();
        Method method = clazz.getMethod("getBinaryStream", new Class[] {});
        InputStream is = (InputStream) method.invoke(blobContent, new Object[] {});
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        int i = -1;
        while ((i = is.read()) != -1) {
            baos.write(i);
        }
        is.close();
        return baos.toString(); // platform default charset; pass a charset name if the data uses another encoding
    } catch (Exception e) {
        e.printStackTrace();
    }
    return "";
}
private Map rowToMap(ResultSet rs) throws Exception {
    Map result = new HashMap();
    ResultSetMetaData rsmd = rs.getMetaData();
    int cols = rsmd.getColumnCount();
    String type = "";
    for (int i = 1; i <= cols; i++) {
        if (rsmd.getColumnName(i).toUpperCase().equals("SOURCE_TYPE")) {
            type = rs.getString(i);
        }
        if (rsmd.getColumnName(i).toUpperCase().equals("DESCRIPTION")) {
            // DESCRIPTION is the BLOB column, so convert it to a String explicitly
            result.put(rsmd.getColumnName(i).toLowerCase(), this.convertBLOBtoString(rs.getBlob(i)));
        } else {
            result.put(rsmd.getColumnName(i).toLowerCase(), rs.getString(i));
        }
    }
    // for source types 4 and 6 the description holds an XML fragment; pull two attributes out of it
    if (!type.equals("")) {
        if ("4".equals(type.trim()) || "6".equals(type.trim())) {
            String xml = (String) result.get("description");
            Document doc = DocumentHelper.parseText(xml);
            Element root = doc.getRootElement();
            String cal_function = root.attributeValue("cal_function");
            String require_des = root.attributeValue("require_des");
            result.put("cal_function", cal_function);
            result.put("require_des", require_des);
        }
    }
    return result;
}
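For source_type 4 or 6 the description column is expected to contain an XML fragment whose root element carries cal_function and require_des attributes. The exact element name and values are not shown in the original, so the following is only an illustrative shape parsed with dom4j:

// Hypothetical description payload (root element name and values are assumptions):
// <description cal_function="SUM(amount)" require_des="monthly total"/>
Document doc = DocumentHelper.parseText(
        "<description cal_function=\"SUM(amount)\" require_des=\"monthly total\"/>");
Element root = doc.getRootElement();
System.out.println(root.attributeValue("cal_function")); // SUM(amount)
System.out.println(root.attributeValue("require_des"));  // monthly total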
@Override
public List getBooldData(Map map) throws Exception {
    Connection conn = null;
    PreparedStatement stmt = null;
    ResultSet rs = null;
    List<Map<String, String>> list = null;
    // bind the filter values as parameters instead of concatenating them into the SQL string
    String whereCond = " where a.interc_type=? and a.interc_metadata_id=?";
    try {
        conn = DAOUtils.getDBConnection(JNDINames.CRM_DATASOURCE, this);
        stmt = conn.prepareStatement(DAOSQLUtils.getFilterSQL(SELECT_SQL + whereCond));
        stmt.setString(1, (String) map.get("interc_type"));
        stmt.setString(2, (String) map.get("interc_metadata_id"));
        rs = stmt.executeQuery();
        list = new ArrayList<Map<String, String>>();
        while (rs.next()) {
            list.add(this.rowToMap(rs));
        }
    } catch (SQLException se) {
        Debug.print(SELECT_SQL + whereCond, this);
        throw new DAOSystemException("SQLException while getting sql:\n" + SELECT_SQL + whereCond, se);
    } finally {
        DAOUtils.closeResultSet(rs, this);
        DAOUtils.closeStatement(stmt, this);
        DAOUtils.closeConnection(conn, this);
    }
    System.out.println(list.size()); // debug output
    return list;
}
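A minimal call sketch; the key names come from the where clause above, the values and the dao variable are placeholders:

// Sketch: query metadata rows that carry a BLOB description
Map param = new HashMap();
param.put("interc_type", "1");            // placeholder value
param.put("interc_metadata_id", "1001");  // placeholder value
List rows = dao.getBooldData(param);      // 'dao' is the DAO instance that defines getBooldData
for (Object row : rows) {
    System.out.println(((Map) row).get("description"));
}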