Lucene Highlighting

The example below indexes a short piece of Chinese text into an in-memory RAMDirectory, searches it, and uses Lucene's Highlighter to wrap the matched query terms in HTML tags.

package com.feedsky.lucene;

import java.io.StringReader;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleFragmenter;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class HighLighterTest {

    public static void main(String[] args) {
        String fieldName = "text";
        // Text to index and search against
        String text = "《越狱3》播出时间再次延期急煞中国“狱友”巨型变形金刚惊现北京《龙珠Z》绝招威力排行《不能说的秘密》清纯版迅雷宽频合作发行《越狱动画版》";
        // Use the Lietu Chinese analyzer for word segmentation
        Analyzer analyzer = new CnAnalyzer();
        Directory directory = new RAMDirectory();
        try {
            // Build the index in memory
            IndexWriter iwriter = new IndexWriter(directory, analyzer, true);
            iwriter.setMaxFieldLength(25000);
            Document doc = new Document();
            doc.add(new Field(fieldName, text, Field.Store.YES,
                    Field.Index.TOKENIZED,
                    Field.TermVector.WITH_POSITIONS_OFFSETS));
            iwriter.addDocument(doc);
            iwriter.close();

            // Search the index
            IndexSearcher isearcher = new IndexSearcher(directory);
            QueryParser queryParse = new QueryParser(fieldName, analyzer);
            Query query = queryParse.parse("越狱");
            Hits hits = isearcher.search(query);

            for (int i = 0; i < hits.length(); i++) {
                Document docTemp = hits.doc(i);
                String value = docTemp.get(fieldName);

                // Format the highlighted terms: here they are simply shown in bold red
                SimpleHTMLFormatter sHtmlF = new SimpleHTMLFormatter("<b><font color='red'>", "</font></b>");
                Highlighter highlighter = new Highlighter(sHtmlF, new QueryScorer(query));
                // Limit each fragment to roughly 10 characters
                highlighter.setTextFragmenter(new SimpleFragmenter(10));

                if (value != null) {
                    // Re-tokenize the stored field value and extract the best-scoring fragment
                    TokenStream tokenStream = analyzer.tokenStream(fieldName, new StringReader(value));
                    String str = highlighter.getBestFragment(tokenStream, value);
                    System.out.println(str);
                }
            }
            isearcher.close();
            directory.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
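For reference: this code targets the Lucene 2.x API (Hits and Field.Index.TOKENIZED were removed or renamed in later releases). It prints the best-scoring fragment, limited to about 10 characters by SimpleFragmenter, with each matched term wrapped in the formatter's tags. Assuming CnAnalyzer segments "越狱" as a single token, the output looks roughly like the line below; the exact fragment boundary depends on the analyzer's segmentation:

《<b><font color='red'>越狱</font></b>3》播出时间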
