Encapsulating a generic search function that uses the Lucene API

The helper below wraps a paged, multi-field Lucene query behind a single search() method: callers pass the fields to query, the keywords, per-field flags choosing between QueryParser parsing and exact TermQuery matching, and a DocFetcher that maps each matching Document back to a domain object.

	/** Maps a raw Lucene Document (one search hit) to a domain object of type T. */
	public interface DocFetcher<T> {
		public T get (Document doc);
	}
	private IndexSearcher searcher;
	private Analyzer analyzer;
	// Loggers referenced by search(); log4j-style loggers are assumed here.
	private Logger infoLogger;
	private Logger exceptionLogger;
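	// searcher and analyzer must be initialized before search() is called; one possible
	// setup (the index path and the analyzer choice are placeholders, not prescribed):
	// searcher = new IndexSearcher(FSDirectory.open(new File("/path/to/index")), true);
	// analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);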
	/**
	 * @param fields   the index fields this search runs against
	 * @param flags    one entry per field: true means parse the keywords for that field with a
	 *                 QueryParser, false means use a TermQuery for an exact match; the length of
	 *                 flags MUST equal the length of fields
	 * @param keywords the search terms; within a field every keyword must match (MUST), and a
	 *                 document is returned if it matches on any of the fields (SHOULD)
	 * @param filter   may be null
	 * @param sort     null if sorting the results is not required
	 * @param pageNo   1-based page number; values <= 0 are treated as 1
	 * @param pageSize number of hits per page; values <= 0 are treated as 1
	 * @param result   receives the requested page of hits, converted by ft
	 * @param ft       converts each matching Document into a T
	 * @return the total number of documents the search found (not just the size of this page)
	 */
	public <T> int search (String[] fields, boolean[] flags, String[] keywords, Filter filter, Sort sort, int pageNo, int pageSize, List<T> result, DocFetcher<T> ft) {
		try {
			long start = System.currentTimeMillis();
			
			StringBuffer fieldStr = new StringBuffer();
			StringBuffer keywordStr = new StringBuffer();
			
			QueryParser parser = null;
			BooleanQuery bq = new BooleanQuery();
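			// Build one clause per field: within a field every keyword must match (MUST),
			// and the outer query accepts a document that matches on any field (SHOULD).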
			for (int i = 0; i < fields.length; ++i) {
				if (flags[i])
					parser = new QueryParser(Version.LUCENE_CURRENT, fields[i], analyzer);
				
				BooleanQuery bqInner = new BooleanQuery();
				for (int j = 0; j < keywords.length; ++j) {
					if (flags[i]) 
						bqInner.add(parser.parse(keywords[j]), BooleanClause.Occur.MUST);
					else bqInner.add(new TermQuery(new Term(fields[i], keywords[j])), BooleanClause.Occur.MUST);
				}
				
				bq.add(bqInner, BooleanClause.Occur.SHOULD);
				
				fieldStr.append('[').append(fields[i]).append("] ");
			}
			for (int i = 0; i < keywords.length; ++i) 
				keywordStr.append('[').append(keywords[i]).append("] ");
			
			infoLogger.info("##########Query starts#########");
			infoLogger.info("##FIELDS: " + fieldStr.toString());
			infoLogger.info("##KEYWORDS: " + keywordStr.toString());
			
			if (pageNo <= 0) pageNo = 1;
			if (pageSize <= 0) pageSize = 1;
			
			int limit = pageNo * pageSize; 
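			// Simple paging: fetch the top pageNo * pageSize hits from Lucene, then copy only the requested page into result.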
			TopDocs hits = sort != null ? searcher.search(bq, filter, limit, sort) : searcher.search(bq, filter, limit);
			ScoreDoc[] scoreDocs = hits.scoreDocs;
			
			int startIndex = (pageNo - 1) * pageSize;
			if (startIndex >= scoreDocs.length)		// requested page is beyond the hits: fall back to the last page that has results
				startIndex = scoreDocs.length == 0 ? 0 : ((scoreDocs.length - 1) / pageSize) * pageSize;
			for (int i = startIndex; i < scoreDocs.length; ++i) {
				result.add(ft.get(searcher.doc(scoreDocs[i].doc)));
			}
			
			long interval = System.currentTimeMillis() - start;
			infoLogger.info("########## " + hits.totalHits + " Documents Found. (" + interval / 1000 + " seconds " + interval % 1000 + " milliseconds) #########");
			return hits.totalHits;
		} catch (Exception ex) {
			exceptionLogger.fatal("Error occurs when searching.");
			exceptionLogger.fatal(ex.getMessage(), ex);
		}
		return 0;
	}
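
For illustration, here is one way the method might be called. Everything specific in this sketch is an assumption rather than part of the code above: SearchHelper as the name of the class holding the snippet, the index field names "title" and "content", and the choice of keywords.

	import java.util.ArrayList;
	import java.util.List;
	
	import org.apache.lucene.document.Document;
	
	public class SearchHelperDemo {
		public static void main(String[] args) {
			SearchHelper helper = new SearchHelper();	// assumed wrapper class containing search()
			List<String> titles = new ArrayList<String>();
	
			// Exact term match on "title" (flag = false), analyzed QueryParser match on "content" (flag = true).
			int total = helper.search(
					new String[] { "title", "content" },	// fields
					new boolean[] { false, true },			// flags, same length as fields
					new String[] { "lucene" },				// keywords
					null,									// no filter
					null,									// no sort: relevance order
					1, 10,									// first page, 10 hits per page
					titles,
					new SearchHelper.DocFetcher<String>() {
						public String get(Document doc) {
							return doc.get("title");		// map each hit to its stored title field
						}
					});
	
			System.out.println(total + " hits in total; first page: " + titles);
		}
	}

Because the first flag is false, the title clause is a TermQuery that must match the stored term exactly, while the content keywords go through the analyzer via QueryParser; a document is returned when it satisfies either field's clause, and only the requested page of hits is copied into the list.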
