SpringBoot实现的JPA封装了JPA的特性,只需要写接口即可,但是有的时候约定的写法不符合我们的开发要求,没有很好的灵活性,这就需要我们自己去定义一下方法实现自己的封装Repository。
借鉴网上配置:
文章一:http://blog.csdn.net/u011659172/article/details/51537602
文章二:http://blog.csdn.net/xiao_xuwen/article/details/53579353
pom.xml 依赖:

<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
</dependency>
package com.demo.dao;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.NoRepositoryBean;
/**
 * Base repository interface exposing native-SQL and JPQL query helpers in
 * addition to the standard {@link JpaRepository} CRUD operations.
 *
 * <p>Marked {@link NoRepositoryBean} so Spring Data does not attempt to
 * create a repository proxy for this intermediate interface itself.
 *
 * @param <T>  the entity type managed by the repository
 * @param <ID> the entity's primary-key type
 */
@NoRepositoryBean
public interface BaseRepository<T, ID extends Serializable> extends JpaRepository<T, ID> {

    /**
     * Paged native-SQL query: runs {@code queryString} for the page rows and
     * {@code countSql} for the total count, returning both in one map.
     *
     * @param queryString native SQL selecting the page rows
     * @param countSql    native SQL selecting the total row count
     * @param values      named parameters bound to both queries (may be null)
     * @param offset      zero-based index of the first row
     * @param limit       maximum number of rows per page
     * @param countName   map key under which the total count is stored
     * @param rowsName    map key under which the row list is stored
     * @return a map holding the total count and the mapped entity rows
     */
    HashMap sqlQuery(String queryString, String countSql, Map<String, Object> values,
            int offset, int limit, String countName, String rowsName);

    /** Native-SQL list query with named parameters, mapped onto the entity type. */
    List sqlQuery(String queryString, Map<String, Object> values);

    /** Native-SQL list query with positional (1-based, in order) parameters. */
    List sqlQuery(String queryString, Object... values);

    /**
     * Paged JPQL query: same contract as the native-SQL pagination helper,
     * but {@code queryString} and {@code countHql} are JPQL.
     */
    HashMap retrieve(String queryString, String countHql, Map<String, Object> values,
            int offset, int limit, String countName, String rowsName);
}
package com.demo.dao;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.springframework.data.jpa.repository.support.SimpleJpaRepository;
import org.springframework.util.Assert;
/**
 * Base DAO implementation.
 *
 * <p>Extends {@link SimpleJpaRepository} with helpers for native-SQL and JPQL
 * queries: list queries with named or positional parameters, and
 * count-plus-rows pagination. Native result rows are mapped onto the entity
 * class via {@link BeanTransformerAdapter}.
 *
 * @author xxx
 * @since 1.0
 */
public class BaseRepositoryImpl<T, ID extends Serializable> extends SimpleJpaRepository<T, ID>
        implements BaseRepository {

    /** EntityManager supplied by the factory; used to create all queries. */
    private final EntityManager entityManager;

    /** Entity class that native-SQL result rows are mapped onto. */
    private final Class<T> entityClass;

    /**
     * {@link SimpleJpaRepository} has no default constructor, so the domain
     * class and EntityManager must be passed through explicitly — this is
     * done by BaseRepositoryFactoryBean when repositories are created.
     */
    public BaseRepositoryImpl(Class<T> domainClass, EntityManager entityManager) {
        super(domainClass, entityManager);
        this.entityManager = entityManager;
        this.entityClass = domainClass;
    }

    /**
     * Paged native-SQL query: runs {@code queryString} for the page rows and
     * {@code countSql} for the total count, returning both in one map.
     *
     * @param queryString native SQL selecting the page rows
     * @param countSql    native SQL selecting the total count
     * @param values      named parameters bound to both queries (may be null)
     * @param offset      zero-based index of the first row
     * @param limit       maximum rows per page
     * @param countName   key under which the count is stored in the result map
     * @param rowsName    key under which the row list is stored
     * @return map holding the total count and the mapped rows
     */
    @Override
    public HashMap sqlQuery(String queryString, String countSql, Map<String, Object> values,
            int offset, int limit, String countName, String rowsName) {
        Assert.hasText(queryString, "queryString不能为空");
        HashMap<String, Object> result = new HashMap<String, Object>();
        Query query = entityManager.createNativeQuery(queryString);
        Query countQuery = entityManager.createNativeQuery(countSql);
        // Bind each named parameter to both the page query and the count query.
        if (values != null && !values.isEmpty()) {
            for (Map.Entry<String, Object> entry : values.entrySet()) {
                query.setParameter(entry.getKey(), entry.getValue());
                countQuery.setParameter(entry.getKey(), entry.getValue());
            }
        }
        query.setFirstResult(offset);
        query.setMaxResults(limit);
        // Map each native row onto the entity class instead of Object[].
        query.unwrap(SQLQuery.class).setResultTransformer(new BeanTransformerAdapter(this.entityClass));
        Object rows = query.getResultList();
        Object total = countQuery.getSingleResult();
        result.put(countName, total);
        result.put(rowsName, rows);
        return result;
    }

    /**
     * Native-SQL list query with named parameters, mapped onto the entity class.
     */
    @Override
    public List sqlQuery(String queryString, Map<String, Object> values) {
        Session session = entityManager.unwrap(org.hibernate.Session.class);
        SQLQuery query = session.createSQLQuery(queryString);
        if (values != null) {
            // setProperties binds every map entry whose key matches a named parameter.
            query.setProperties(values);
        }
        query.setResultTransformer(new BeanTransformerAdapter(this.entityClass));
        return query.list();
    }

    /**
     * Native-SQL list query with positional parameters, bound in order.
     */
    @Override
    public List sqlQuery(String queryString, Object... values) {
        Query query = entityManager.createNativeQuery(queryString);
        if (values != null) {
            // JPA positional parameters are 1-based.
            for (int i = 0; i < values.length; i++) {
                query.setParameter(i + 1, values[i]);
            }
        }
        query.unwrap(SQLQuery.class).setResultTransformer(new BeanTransformerAdapter(this.entityClass));
        return query.getResultList();
    }

    /**
     * Paged JPQL query: same contract as the native-SQL pagination helper,
     * but {@code queryString}/{@code countHql} are JPQL. No transformer is
     * applied — JPQL queries already return entity instances.
     */
    @Override
    public HashMap retrieve(String queryString, String countHql, Map<String, Object> values,
            int offset, int limit, String countName, String rowsName) {
        Assert.hasText(queryString, "queryString不能为空");
        HashMap<String, Object> result = new HashMap<String, Object>();
        Query query = entityManager.createQuery(queryString);
        Query countQuery = entityManager.createQuery(countHql);
        // Bind each named parameter to both the page query and the count query.
        if (values != null && !values.isEmpty()) {
            for (Map.Entry<String, Object> entry : values.entrySet()) {
                query.setParameter(entry.getKey(), entry.getValue());
                countQuery.setParameter(entry.getKey(), entry.getValue());
            }
        }
        query.setFirstResult(offset);
        query.setMaxResults(limit);
        Object rows = query.getResultList();
        Object total = countQuery.getSingleResult();
        result.put(countName, total);
        result.put(rowsName, rows);
        return result;
    }
}
package com.demo.dao;
import java.beans.PropertyDescriptor;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.transform.ResultTransformer;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.NotWritablePropertyException;
import org.springframework.beans.PropertyAccessorFactory;
import org.springframework.beans.TypeMismatchException;
import org.springframework.dao.DataRetrievalFailureException;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.support.JdbcUtils;
import org.springframework.util.StringUtils;
/**
 * Hibernate {@link ResultTransformer} that turns each native-SQL result row
 * into a new instance of a target bean class. Column aliases are matched to
 * writable bean properties case-insensitively, either directly or through
 * their underscored form ({@code userName} matches {@code user_name}).
 *
 * <p>Adapted from Spring's {@link BeanPropertyRowMapper}.
 */
public class BeanTransformerAdapter implements ResultTransformer {

    private static final long serialVersionUID = 8149706114474043039L;

    protected final Log logger = LogFactory.getLog(getClass());

    /** Target bean class that rows are mapped onto. */
    private Class<?> mappedClass;

    /** If true, every writable property must be populated from the row. */
    private boolean checkFullyPopulated = false;

    /** If true, a null value for a primitive property is tolerated (property keeps its default). */
    private boolean primitivesDefaultedForNullValue = false;

    /** Lower-cased / underscored column name -> writable property descriptor. */
    private Map<String, PropertyDescriptor> mappedFields;

    /** Names of all writable properties; used for the fully-populated check. */
    private Set<String> mappedProperties;

    /** Creates an uninitialized adapter; call {@link #setMappedClass(Class)} before use. */
    public BeanTransformerAdapter() {
    }

    /** Creates an adapter that maps rows onto {@code mappedClass}. */
    public BeanTransformerAdapter(Class<?> mappedClass) {
        initialize(mappedClass);
    }

    /**
     * Creates an adapter that maps rows onto {@code mappedClass}, optionally
     * requiring every property to be populated from the row.
     */
    public BeanTransformerAdapter(Class<?> mappedClass, boolean checkFullyPopulated) {
        initialize(mappedClass);
        this.checkFullyPopulated = checkFullyPopulated;
    }

    /** Sets the target class once; reassigning a different class is rejected. */
    public void setMappedClass(Class<?> mappedClass) {
        if (this.mappedClass == null) {
            initialize(mappedClass);
        } else if (!this.mappedClass.equals(mappedClass)) {
            throw new InvalidDataAccessApiUsageException("The mapped class can not be reassigned to map to " +
                    mappedClass + " since it is already providing mapping for " + this.mappedClass);
        }
    }

    /** Builds the column-name -> property lookup tables for the target class. */
    protected void initialize(Class<?> mappedClass) {
        this.mappedClass = mappedClass;
        this.mappedFields = new HashMap<String, PropertyDescriptor>();
        this.mappedProperties = new HashSet<String>();
        PropertyDescriptor[] pds = BeanUtils.getPropertyDescriptors(mappedClass);
        for (PropertyDescriptor pd : pds) {
            if (pd.getWriteMethod() != null) {
                this.mappedFields.put(pd.getName().toLowerCase(), pd);
                String underscoredName = underscoreName(pd.getName());
                // Register the underscored alias only when it differs from the plain form.
                if (!pd.getName().toLowerCase().equals(underscoredName)) {
                    this.mappedFields.put(underscoredName, pd);
                }
                this.mappedProperties.add(pd.getName());
            }
        }
    }

    /** Converts camelCase to lower-case underscore form: userName -> user_name. */
    private String underscoreName(String name) {
        if (!StringUtils.hasLength(name)) {
            return "";
        }
        StringBuilder result = new StringBuilder();
        result.append(name.substring(0, 1).toLowerCase());
        for (int i = 1; i < name.length(); i++) {
            String s = name.substring(i, i + 1);
            String slc = s.toLowerCase();
            if (!s.equals(slc)) {
                result.append("_").append(slc);
            } else {
                result.append(s);
            }
        }
        return result.toString();
    }

    public final Class<?> getMappedClass() {
        return this.mappedClass;
    }

    public void setCheckFullyPopulated(boolean checkFullyPopulated) {
        this.checkFullyPopulated = checkFullyPopulated;
    }

    public boolean isCheckFullyPopulated() {
        return this.checkFullyPopulated;
    }

    public void setPrimitivesDefaultedForNullValue(boolean primitivesDefaultedForNullValue) {
        this.primitivesDefaultedForNullValue = primitivesDefaultedForNullValue;
    }

    public boolean isPrimitivesDefaultedForNullValue() {
        return this.primitivesDefaultedForNullValue;
    }

    /** Hook for subclasses to customize the BeanWrapper (e.g. register custom editors). */
    protected void initBeanWrapper(BeanWrapper bw) {
    }

    /** Extracts a column value converted to the property's declared type. */
    protected Object getColumnValue(ResultSet rs, int index, PropertyDescriptor pd) throws SQLException {
        return JdbcUtils.getResultSetValue(rs, index, pd.getPropertyType());
    }

    /**
     * Static factory kept for API compatibility.
     * NOTE(review): returns a {@link BeanPropertyRowMapper}, not a
     * BeanTransformerAdapter — this looks like a copy-paste leftover from the
     * class this was adapted from; confirm what callers expect before changing.
     */
    public static BeanPropertyRowMapper newInstance(Class<?> mappedClass) {
        BeanPropertyRowMapper newInstance = new BeanPropertyRowMapper();
        newInstance.setMappedClass(mappedClass);
        return newInstance;
    }

    /**
     * Maps one result row (values plus column aliases) onto a new bean
     * instance. Aliases without a matching property are skipped; a
     * TypeMismatchException on a null value is tolerated when
     * {@link #isPrimitivesDefaultedForNullValue()} is set.
     */
    public Object transformTuple(Object[] tuple, String[] aliases) {
        Object mappedObject = BeanUtils.instantiate(this.mappedClass);
        BeanWrapper bw = PropertyAccessorFactory.forBeanPropertyAccess(mappedObject);
        initBeanWrapper(bw);
        Set<String> populatedProperties = isCheckFullyPopulated() ? new HashSet<String>() : null;
        for (int i = 0; i < aliases.length; i++) {
            String column = aliases[i];
            PropertyDescriptor pd = this.mappedFields.get(column.replaceAll(" ", "").toLowerCase());
            if (pd == null) {
                continue;
            }
            try {
                Object value = tuple[i];
                try {
                    bw.setPropertyValue(pd.getName(), value);
                } catch (TypeMismatchException e) {
                    if (value == null && this.primitivesDefaultedForNullValue) {
                        // Leave the primitive property at its default value.
                        this.logger.debug("Intercepted TypeMismatchException for column '" + column +
                                "' with null value when setting property '" + pd.getName() +
                                "' of type " + pd.getPropertyType() + " on object: " + mappedObject);
                    } else {
                        throw e;
                    }
                }
                if (populatedProperties != null) {
                    populatedProperties.add(pd.getName());
                }
            } catch (NotWritablePropertyException ex) {
                throw new DataRetrievalFailureException("Unable to map column " + column +
                        " to property " + pd.getName(), ex);
            }
        }
        if (populatedProperties != null && !populatedProperties.equals(this.mappedProperties)) {
            throw new InvalidDataAccessApiUsageException("Given ResultSet does not contain all fields necessary to populate object of class [" +
                    this.mappedClass + "]: " + this.mappedProperties);
        }
        return mappedObject;
    }

    /** Rows were already transformed one by one; the list is returned unchanged. */
    public List transformList(List list) {
        return list;
    }
}
package com.demo.dao;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.support.JpaRepositoryFactory;
import org.springframework.data.jpa.repository.support.JpaRepositoryFactoryBean;
import org.springframework.data.repository.core.RepositoryInformation;
import org.springframework.data.repository.core.RepositoryMetadata;
import org.springframework.data.repository.core.support.RepositoryFactorySupport;
import javax.persistence.EntityManager;
import java.io.Serializable;
/**
* Created by konghao on 2016/12/7.
*/
public class BaseRepositoryFactoryBean, T,
I extends Serializable> extends JpaRepositoryFactoryBean {
public BaseRepositoryFactoryBean(Class extends R> repositoryInterface) {
super(repositoryInterface);
}
@Override
protected RepositoryFactorySupport createRepositoryFactory(EntityManager em) {
return new BaseRepositoryFactory(em);
}
//创建一个内部类,该类不用在外部访问
private static class BaseRepositoryFactory
extends JpaRepositoryFactory {
private final EntityManager em;
public BaseRepositoryFactory(EntityManager em) {
super(em);
this.em = em;
}
//设置具体的实现类是BaseRepositoryImpl
@Override
protected Object getTargetRepository(RepositoryInformation information) {
return new BaseRepositoryImpl((Class) information.getDomainType(), em);
}
//设置具体的实现类的class
@Override
protected Class> getRepositoryBaseClass(RepositoryMetadata metadata) {
return BaseRepositoryImpl.class;
}
}
}
/**
 * Spring Boot entry point. Registers the custom repository factory bean so
 * that all JPA repositories are backed by BaseRepositoryImpl.
 *
 * NOTE(review): basePackages is "com.redsoft" while the repository classes
 * shown above live under "com.demo" — confirm the intended scan package.
 */
@SpringBootApplication
@EnableJpaRepositories(basePackages = {"com.redsoft"}, repositoryFactoryBeanClass = BaseRepositoryFactoryBean.class) // plug in our own factory class
public class EPIPApplication extends SpringBootServletInitializer {

    /** Configure the builder for WAR deployment in an external servlet container. */
    @Override
    protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
        return application.sources(EPIPApplication.class);
    }

    public static void main(String[] args) {
        SpringApplication.run(EPIPApplication.class, args);
    }
}