整体思路:使用Redis做缓存,缓存策略为首次查询从数据库抓取,取出后存入Redis,删除和更新操作在Redis中进行,并且定时同步至数据库;Lucene做全文检索,RAMDirectory和FSDirectory并用,事先在磁盘生成索引,启动后转为RAMDirectory,存取均在RAMDirectory进行,采用“定时从RAMDirectory向磁盘同步”的策略。
/**
 * Cache facade over Redis for {@code Article} entities.
 *
 * <p>Reads go through the cache; deletes and updates are applied to Redis
 * immediately and recorded in two Redis lists ({@code to_delete_keys},
 * {@code to_update_keys}) that the scheduled {@link #redisSync()} job drains
 * into the database.
 */
public class RedisManager {

    /** Key prefix for cached articles ("article:&lt;id&gt;"). */
    private static final String ARTICLE_KEY_PREFIX = "article:";
    /** Redis list holding cache keys whose rows await DB deletion. */
    private static final String TO_DELETE_KEYS = "to_delete_keys";
    /** Redis list holding cache keys whose rows await DB update. */
    private static final String TO_UPDATE_KEYS = "to_update_keys";

    @Autowired
    private ArticleMapper articleMapper;
    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Fetches a cached article.
     *
     * @param id article id; {@code null} yields {@code null}
     * @return the cached article, or {@code null} on a miss or when the
     *         stored value is not an {@code Article}
     */
    public Article getArticle(Long id) {
        if (id == null) {
            return null;
        }
        ValueOperations<String, Object> valueOper = redisTemplate.opsForValue();
        Object value = valueOper.get(ARTICLE_KEY_PREFIX + id);
        // instanceof also rejects null, but keep the explicit contract visible
        if (!(value instanceof Article)) {
            return null;
        }
        return (Article) value;
    }

    /**
     * Caches an article under "article:&lt;id&gt;" with a 48-hour TTL.
     * A {@code null} id or value is silently ignored (no-op).
     */
    public void setArticle(Long id, Article value) {
        if (id == null || value == null) {
            return;
        }
        ValueOperations<String, Object> valueOper = redisTemplate.opsForValue();
        valueOper.set(ARTICLE_KEY_PREFIX + id, value, 48, TimeUnit.HOURS); // 48小时失效
    }

    /** Evicts the cached copy of the given article. */
    public void delArticle(Long id) {
        redisTemplate.delete(ARTICLE_KEY_PREFIX + id);
    }

    /** Records an id for deferred DB deletion (drained by {@link #redisSync()}). */
    public void addDelKey(Long id) {
        ListOperations<String, Object> listOper = redisTemplate.opsForList();
        listOper.rightPush(TO_DELETE_KEYS, ARTICLE_KEY_PREFIX + id);
    }

    /**
     * Tells whether the given id is logically deleted (queued for DB deletion
     * but not yet synced). Linear scan of the pending-delete list.
     */
    public boolean isDeletedKey(Long id) {
        ListOperations<String, Object> listOper = redisTemplate.opsForList();
        List<Object> keys = listOper.range(TO_DELETE_KEYS, 0, -1);
        return keys != null && keys.contains(ARTICLE_KEY_PREFIX + id);
    }

    /** Records an id for deferred DB update (drained by {@link #redisSync()}). */
    public void addUpdateKey(Long id) {
        ListOperations<String, Object> listOper = redisTemplate.opsForList();
        listOper.rightPush(TO_UPDATE_KEYS, ARTICLE_KEY_PREFIX + id);
    }

    /**
     * Scheduled job: flushes the pending update and delete queues to the DB.
     *
     * <p>FIX: the original read the whole list with {@code range} and then
     * {@code delete}d the list key, silently dropping any id pushed between
     * the two calls. Popping one entry at a time closes that window.
     * Also guards against a cache entry that expired or was deleted after
     * being queued (the original would pass {@code null} to the mapper).
     */
    public void redisSync() {
        ValueOperations<String, Object> valueOper = redisTemplate.opsForValue();
        ListOperations<String, Object> listOper = redisTemplate.opsForList();
        // 同步更新
        Object key;
        while ((key = listOper.leftPop(TO_UPDATE_KEYS)) != null) {
            Object value = valueOper.get((String) key);
            if (value instanceof Article) { // entry may have expired meanwhile
                articleMapper.updateArticle((Article) value);
            }
        }
        // 同步删除
        while ((key = listOper.leftPop(TO_DELETE_KEYS)) != null) {
            articleMapper.deleteArticle(Long.parseLong(((String) key).split(":")[1]));
        }
        System.out.println("-----redis同步完成------");
    }
}
取数据:
/**
 * Looks an article up, cache first.
 *
 * <p>Order matters: a tombstone in the pending-delete queue means the row is
 * logically gone even though the DB delete has not been synced yet, so it is
 * checked before touching cache or DB.
 *
 * <p>FIX: the original logged "数据库中取出" and called
 * {@code setArticle(id, null)} (a documented no-op) even when the DB had no
 * such row; now only real hits are cached and logged.
 *
 * @param id article id
 * @return the article, or {@code null} if unknown or pending deletion
 */
@Override
public Article findArticleById(Long id) {
    if (redisManager.isDeletedKey(id)) {
        return null;
    }
    Article article = redisManager.getArticle(id);
    if (article == null) {
        article = articleMapper.findArticleById(id);
        if (article != null) {
            redisManager.setArticle(id, article);
            System.out.println("------数据库中取出!");
        }
    }
    return article;
}
删除数据 :
/**
 * Deletes an article: evicts the cache entry, drops the index document and
 * queues the id for deferred DB deletion.
 *
 * <p>FIX: the tombstone is now enqueued FIRST. In the original order, an
 * exception from {@code delArticle}/{@code deleteDocument} skipped
 * {@code addDelKey}, so the DB row was never deleted and the article could
 * resurrect from the database on the next cache miss. With the tombstone in
 * place, {@code findArticleById} hides the record even if the later steps
 * fail, and the scheduled sync removes the row.
 */
@Override
public void deleteArticle(Long id) {
    try {
        redisManager.addDelKey(id);       // tombstone first — see note above
        redisManager.delArticle(id);      // evict from redis
        luceneManager.deleteDocument(id); // remove from the in-memory index
    } catch (Exception e) {
        // best-effort: failures are logged, not propagated (original contract)
        e.printStackTrace();
    }
}
更新数据 :
/**
 * Updates an article: refreshes the cached copy, queues the id for deferred
 * DB update and updates the full-text index.
 *
 * <p>FIX: the update key is now enqueued BEFORE the Lucene update. In the
 * original order, an exception from {@code updateDocument} skipped
 * {@code addUpdateKey}, so the DB was never synced and the stale row
 * resurrected once the 48-hour cache entry expired.
 */
@Override
public void updateArticle(Article article) {
    try {
        redisManager.setArticle(article.getId(), article); // refresh cache copy
        redisManager.addUpdateKey(article.getId());        // queue DB sync first
        luceneManager.updateDocument(article);             // then refresh the index
    } catch (Exception e) {
        // best-effort: failures are logged, not propagated (original contract)
        e.printStackTrace();
    }
}
添加数据:先写数据库,再建立索引并写入缓存(新增不走"先查缓存"的路径):
// Creates a new article: write straight to the database first, then index it
// and warm the cache with the same object. Statement order is load-bearing:
// NOTE(review): the index/cache writes assume the mapper populates
// article.getId() via generated-key retrieval during addArticle — confirm
// the MyBatis <insert> mapping (useGeneratedKeys/keyProperty).
public void addArticle(Article article) {
try {
articleMapper.addArticle(article);
luceneManager.addDocument(article);
redisManager.setArticle(article.getId(), article);
} catch (Exception e) {
// best-effort: failures are only logged, never propagated to the caller
e.printStackTrace();
}
}
Lucene 相关 jar 包需自行下载引入(lucene-core、lucene-analyzers-smartcn、lucene-highlighter 等)。
/**
 * Lucene helper serving the whole index from a {@link RAMDirectory}.
 *
 * <p>The on-disk index at {@code INDEXPATH} is only a startup seed (loaded in
 * the static initializer) and a periodic backup target ({@link #indexSync()}).
 * All reads and writes go through the in-memory writer {@code ramWriter}.
 */
public class LuceneUtil {

    /** On-disk location of the persistent copy of the index. */
    private static final String INDEXPATH = "D:\\lucene";

    private static RAMDirectory ramDirectory;
    private static IndexWriter ramWriter;

    @Autowired
    private ArticleMapper articleMapper;

    static {
        try {
            // Seed the RAM index from disk once at startup.
            // NOTE(review): this throws if D:\lucene holds no index yet —
            // reCreatIndex() must have run at least once beforehand; confirm
            // the deployment order.
            FSDirectory fsDirectory = FSDirectory.open(Paths.get(INDEXPATH));
            ramDirectory = new RAMDirectory(fsDirectory, IOContext.READONCE);
            fsDirectory.close();
            IndexWriterConfig indexWriterConfig =
                    new IndexWriterConfig(new SmartChineseAnalyzer()); // 中文分词器
            // Snapshot policy lets indexSync() copy a consistent commit while
            // the writer keeps accepting changes.
            indexWriterConfig.setIndexDeletionPolicy(
                    new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()));
            ramWriter = new IndexWriter(ramDirectory, indexWriterConfig);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Rebuilds the on-disk index from scratch out of the database.
     * (Name kept as-is — "reCreatIndex" — for existing callers.)
     *
     * <p>FIX: guards the {@code listFiles()} null case (missing directory)
     * and closes the writer via try-with-resources even when indexing throws.
     */
    public void reCreatIndex() {
        try {
            Path path = Paths.get(INDEXPATH);
            File dir = path.toFile();
            if (!dir.exists()) {
                dir.mkdirs(); // first run: index directory may not exist yet
            }
            // 删除原有索引文件
            File[] oldFiles = dir.listFiles();
            if (oldFiles != null) {
                for (File file : oldFiles) {
                    file.delete();
                }
            }
            FSDirectory fsDirectory = FSDirectory.open(path);
            IndexWriterConfig indexWriterConfig =
                    new IndexWriterConfig(new SmartChineseAnalyzer()); // 中文分词器
            try (IndexWriter writer = new IndexWriter(fsDirectory, indexWriterConfig)) {
                List<Article> articles = articleMapper.findAll();
                for (Article article : articles) {
                    writer.addDocument(toDocument(article));
                }
            }
            fsDirectory.close();
            System.out.println("-----创建索引成功---");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Maps an {@code Article} to a Lucene document. The id is a
     * {@link StringField} (stored, NOT tokenized) so exact-term deletes and
     * updates on {@code Term("id", ...)} match reliably.
     */
    public Document toDocument(Article article) {
        Document doc = new Document();
        doc.add(new StringField("id", String.valueOf(article.getId()), Field.Store.YES));
        doc.add(new Field("title", article.getTitle(), TextField.TYPE_STORED));
        doc.add(new Field("details", article.getDetails(), TextField.TYPE_STORED));
        return doc;
    }

    /** Adds one document to the RAM index and commits immediately. */
    public synchronized void addDocument(Article article) throws IOException {
        ramWriter.addDocument(toDocument(article));
        ramWriter.commit();
    }

    /** Deletes by exact id term and commits immediately. */
    public synchronized void deleteDocument(Long id) throws IOException {
        Term term = new Term("id", String.valueOf(id));
        ramWriter.deleteDocuments(term);
        ramWriter.commit();
    }

    /**
     * Full-text search over title and details with highlighting.
     *
     * <p>FIX: the original never closed its {@link DirectoryReader}, leaking
     * one reader per search; try-with-resources closes it deterministically.
     *
     * @param keyword user query, parsed over the "title"/"details" fields
     * @return up to 20 articles with highlighted title/details fragments
     */
    public List<Article> search(String keyword)
            throws IOException, ParseException, InvalidTokenOffsetsException {
        List<Article> list = new ArrayList<>();
        try (DirectoryReader reader = DirectoryReader.open(ramDirectory)) {
            IndexSearcher indexSearcher = new IndexSearcher(reader);
            String[] fields = {"title", "details"};
            Analyzer analyzer = new SmartChineseAnalyzer();
            QueryParser queryParser = new MultiFieldQueryParser(fields, analyzer);
            Query query = queryParser.parse(keyword);
            TopDocs hits = indexSearcher.search(query, 20);
            // 高亮
            SimpleHTMLFormatter simpleHTMLFormatter = new SimpleHTMLFormatter("", "");
            Highlighter highlighter =
                    new Highlighter(simpleHTMLFormatter, new QueryScorer(query));
            for (ScoreDoc scoreDoc : hits.scoreDocs) {
                Article article = new Article();
                Document doc = indexSearcher.doc(scoreDoc.doc);
                article.setId(Long.valueOf(doc.get("id")));
                String title = doc.get("title");
                String details = doc.get("details");
                article.setTitle(highlighter.getBestFragment(
                        analyzer.tokenStream("title", new StringReader(title)), title));
                article.setDetails(highlighter.getBestFragment(
                        analyzer.tokenStream("details", new StringReader(details)), details));
                list.add(article);
            }
        }
        return list;
    }

    /** Replaces the document with the given id and commits immediately. */
    public void updateDocument(Article article) throws IOException {
        Term term = new Term("id", String.valueOf(article.getId()));
        ramWriter.updateDocument(term, toDocument(article));
        ramWriter.commit();
    }

    /**
     * Scheduled job: copies the latest committed RAM index to disk.
     *
     * <p>FIXES vs the original: (1) the snapshot is now released in a
     * {@code finally} block — without {@code release()} every sync pinned
     * another commit in the RAMDirectory forever; (2) the pointless
     * {@code config.setIndexCommit(...)} on the live writer's config is
     * removed (that setting only matters when opening a new writer);
     * (3) the target directory is closed via try-with-resources.
     */
    public void indexSync() {
        SnapshotDeletionPolicy snapshotDeletionPolicy = null;
        IndexCommit indexCommit = null;
        try {
            IndexWriterConfig config = (IndexWriterConfig) ramWriter.getConfig();
            snapshotDeletionPolicy = (SnapshotDeletionPolicy) config.getIndexDeletionPolicy();
            indexCommit = snapshotDeletionPolicy.snapshot(); // pin a consistent commit
            Collection<String> fileNames = indexCommit.getFileNames();
            Path toPath = Paths.get(INDEXPATH);
            // 删除所有原有索引文件
            File[] oldFiles = toPath.toFile().listFiles();
            if (oldFiles != null) {
                for (File file : oldFiles) {
                    file.delete();
                }
            }
            // 从ramdir复制新索引文件至磁盘
            try (Directory toDir = FSDirectory.open(toPath)) {
                for (String fileName : fileNames) {
                    toDir.copyFrom(ramDirectory, fileName, fileName, IOContext.DEFAULT);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (snapshotDeletionPolicy != null && indexCommit != null) {
                try {
                    snapshotDeletionPolicy.release(indexCommit); // unpin the commit
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        System.out.println("-----索引同步完成------");
    }
}
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:context="http://www.springframework.org/schema/context"
    xmlns:p="http://www.springframework.org/schema/p"
    xmlns:mvc="http://www.springframework.org/schema/mvc"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="
        http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
        http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd
        http://www.springframework.org/schema/mvc http://www.springframework.org/schema/mvc/spring-mvc.xsd">

    <context:property-placeholder location="classpath*:properties/*.properties"/>

    <!-- Jedis connection pool + factory -->
    <bean id="poolConfig" class="redis.clients.jedis.JedisPoolConfig">
        <property name="maxIdle" value="${redis.maxIdle}"/>
        <property name="maxWaitMillis" value="${redis.maxWait}"/>
        <property name="testOnBorrow" value="${redis.testOnBorrow}"/>
    </bean>
    <bean id="connectionFactory"
          class="org.springframework.data.redis.connection.jedis.JedisConnectionFactory">
        <property name="poolConfig" ref="poolConfig"/>
        <property name="hostName" value="${redis.host}"/>
        <property name="port" value="${redis.port}"/>
        <property name="password" value="${redis.auth}"/>
        <property name="timeout" value="${redis.timeout}"/>
    </bean>

    <!-- String keys + JDK-serialized values, matching RedisManager's usage -->
    <bean id="redisTemplate" class="org.springframework.data.redis.core.RedisTemplate">
        <property name="connectionFactory" ref="connectionFactory"/>
        <property name="keySerializer">
            <bean class="org.springframework.data.redis.serializer.StringRedisSerializer"/>
        </property>
        <property name="valueSerializer">
            <bean class="org.springframework.data.redis.serializer.JdkSerializationRedisSerializer"/>
        </property>
    </bean>

    <!-- Druid datasource + MyBatis -->
    <bean id="dataSource" class="com.alibaba.druid.pool.DruidDataSource">
        <property name="driverClassName" value="${mysql.driver}"/>
        <property name="url" value="${mysql.url}"/>
        <property name="username" value="${mysql.user}"/>
        <property name="password" value="${mysql.password}"/>
        <property name="initialSize" value="${druid.initialSize}"/>
        <property name="minIdle" value="${druid.minIdle}"/>
        <property name="maxActive" value="${druid.maxActive}"/>
        <property name="maxWait" value="${druid.maxWait}"/>
    </bean>
    <bean id="sqlSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
        <property name="dataSource" ref="dataSource"/>
        <property name="configLocation" value="classpath:config/mybatis-config.xml"/>
    </bean>
    <bean class="org.mybatis.spring.mapper.MapperScannerConfigurer">
        <property name="basePackage" value="com.search.mapper"/>
        <property name="sqlSessionFactoryBeanName" value="sqlSessionFactory"/>
    </bean>

    <context:component-scan base-package="com.search.service"/>
    <bean id="redisManager" class="com.search.utils.RedisManager"/>
    <bean id="luceneManager" class="com.search.utils.LuceneUtil"/>

    <!-- Quartz: redisSync daily at midnight; indexSync every 2h after a 60s delay -->
    <bean id="redisSync" class="org.springframework.scheduling.quartz.MethodInvokingJobDetailFactoryBean">
        <property name="targetObject" ref="redisManager"/>
        <property name="targetMethod" value="redisSync"/>
    </bean>
    <bean id="redisTrigger"
          class="org.springframework.scheduling.quartz.CronTriggerFactoryBean">
        <property name="jobDetail" ref="redisSync"/>
        <property name="cronExpression" value="0 0 0 * * ?"/>
    </bean>
    <bean id="indexSync" class="org.springframework.scheduling.quartz.MethodInvokingJobDetailFactoryBean">
        <property name="targetObject" ref="luceneManager"/>
        <property name="targetMethod" value="indexSync"/>
    </bean>
    <bean id="luceneTrigger" class="org.springframework.scheduling.quartz.SimpleTriggerFactoryBean">
        <property name="jobDetail" ref="indexSync"/>
        <property name="startDelay" value="60000"/>
        <property name="repeatInterval" value="7200000"/>
    </bean>
    <bean class="org.springframework.scheduling.quartz.SchedulerFactoryBean">
        <property name="triggers">
            <list>
                <ref bean="redisTrigger"/>
                <ref bean="luceneTrigger"/>
            </list>
        </property>
    </bean>
</beans>
转至:https://github.com/crazylai1996/article-search