<?xml version="1.0" encoding="UTF-8" ?>
<config>
<!-- Abort startup when this configuration file is invalid (overridable via the solr.abortOnConfigurationError system property; defaults to true). -->
<abortOnConfigurationError>${solr.abortOnConfigurationError:true}</abortOnConfigurationError>
<!-- Lucene version compatibility switch controlling index format and analysis behavior. -->
<luceneMatchVersion>4.5</luceneMatchVersion>
<!-- NRTCachingDirectoryFactory wraps the default directory with a small RAM cache, suited for near-real-time search; overridable via solr.directoryFactory. -->
<directoryFactory name="DirectoryFactory"
class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}" />
<!-- Per-field codec selection driven by the schema. -->
<codecFactory class="solr.SchemaCodecFactory" />
<!-- Classic (non-managed) schema: schema.xml is hand-edited, not modifiable via API. -->
<schemaFactory class="ClassicIndexSchemaFactory" />
<indexConfig>
  <!-- Global cap on tokens indexed per field (applies to all fields). Without it the
       only bound is JVM heap; exceeding heap raises an error. -->
  <filter class="solr.LimitTokenCountFilterFactory"
    maxTokenCount="10000" />
  <!-- Max milliseconds to wait for the index write lock. -->
  <writeLockTimeout>10000</writeLockTimeout>
  <maxIndexingThreads>20</maxIndexingThreads>
  <!-- true would pack each segment into a single compound file (fewer open file
       handles, at some indexing/replication cost); disabled here. -->
  <useCompoundFile>false</useCompoundFile>
  <!-- Flush triggers: whichever of RAM buffer size or buffered-doc count is reached
       first causes a flush to disk. -->
  <ramBufferSizeMB>1024</ramBufferSizeMB>
  <maxBufferedDocs>10000</maxBufferedDocs>
  <mergePolicy class="org.apache.lucene.index.TieredMergePolicy">
    <int name="maxMergeAtOnce">10</int>
    <int name="segmentsPerTier">10</int>
  </mergePolicy>
  <!-- FIX(review): removed redundant <mergeFactor>10</mergeFactor>. mergeFactor is a
       LogMergePolicy-style shorthand and conflicts with the explicit TieredMergePolicy
       above, whose maxMergeAtOnce/segmentsPerTier already encode the same value. -->
  <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler" />
  <lockType>${solr.lock.type:native}</lockType>
  <!-- FIX(review): was true. Forced unlock on startup is NOT supported by the native
       lock factory selected above (NativeFSLockFactory); native locks are released by
       the OS automatically, and unlockOnStartup=true with lockType=native fails. -->
  <unlockOnStartup>false</unlockOnStartup>
  <!-- Term index interval; default 128 is adequate for most deployments. -->
  <termIndexInterval>128</termIndexInterval>
  <!-- Reopen (reuse) the IndexReader instead of closing and recreating it — usually
       more efficient. -->
  <reopenReaders>true</reopenReaders>
  <!-- Commit-point deletion policy (NOTE: original comment mislabeled this as the
       "merge policy"). -->
  <deletionPolicy class="solr.SolrDeletionPolicy">
    <!-- Max number of commit points to retain. -->
    <str name="maxCommitsToKeep">1</str>
    <!-- Max number of optimized commit points to retain. -->
    <str name="maxOptimizedCommitsToKeep">0</str>
    <!-- FIX(review): the original declared maxCommitAge twice (2MINUTES and 1DAY);
         duplicates are ambiguous, so a single value is kept. Commit points older than
         this are deleted. -->
    <str name="maxCommitAge">1DAY</str>
  </deletionPolicy>
  <!-- IndexWriter debug logging to the named file; disabled. -->
  <infoStream file="INFOSTREAM.txt">false</infoStream>
</indexConfig>
<!-- Default high-performance update handler. -->
<updateHandler class="solr.DirectUpdateHandler2">
<!-- Transaction log stored on the filesystem under the data dir; required for
     SolrCloud recovery and real-time get. -->
<updateLog class="solr.FSUpdateLog">
<str name="dir">${solr.data.dir:}</str>
</updateLog>
<maxPendingDeletes>10000</maxPendingDeletes>
<!-- Hard commit: triggered after 6,000,000 buffered docs (time trigger is commented
     out); openSearcher=false keeps hard commits cheap — visibility comes from the
     soft commits below. -->
<autoCommit>
<maxDocs>6000000</maxDocs>
<!--maxTime>100000</maxTime -->
<openSearcher>false</openSearcher>
</autoCommit>
<!-- Soft commit every 100,000 ms makes recent updates searchable without a full
     flush to stable storage. -->
<autoSoftCommit>
<maxTime>100000</maxTime>
</autoSoftCommit>
</updateHandler>
<query>
<!-- Max clauses allowed in a BooleanQuery before an exception is thrown. -->
<maxBooleanClauses>1024</maxBooleanClauses>
<!-- Filter cache with time-decayed LRU; autowarmed with up to 100 entries on new-searcher open. -->
<filterCache class="solr.FastLRUCache" size="100"
initialSize="100" autowarmCount="100" timeDecay="true" />
<!-- queryResultCache class="solr.LFUCache" size="100"
initialSize="100" autowarmCount="100" timeDecay="true" /-->
<!-- documentCache class="solr.LFUCache" size="100"
initialSize="100" autowarmCount="0" timeDecay="true" /-->
<!-- Load stored fields lazily; fields not requested are not deserialized. -->
<enableLazyFieldLoading>true</enableLazyFieldLoading>
<useFilterForSortedQuery>true</useFilterForSortedQuery>
<!-- Rows cached per query-result-cache entry (superset window for paging). -->
<queryResultWindowSize>40</queryResultWindowSize>
<!-- Max documents cached per query result entry. -->
<queryResultMaxDocsCached>10</queryResultMaxDocsCached>
<!-- NOTE(review): the original comment said "use cloud search" — mistranslation.
     This controls whether a request may use a *cold* (not yet warmed) searcher;
     false means requests block until warming completes. -->
<useColdSearcher>false</useColdSearcher>
<maxWarmingSearchers>2</maxWarmingSearchers>
<HashDocSet maxSize="10000" loadFactor="0.75" />
</query>
<!-- HTTP caching disabled: never304=true means no cache-validation headers are honored or emitted. -->
<!-- NOTE(review): handleSelect="true" is the legacy dispatch mode for /select; kept as configured. -->
<requestDispatcher handleSelect="true">
<requestParsers enableRemoteStreaming="true"
multipartUploadLimitInKB="2048000" formdataUploadLimitInKB="2048" />
<httpCaching never304="true" />
</requestDispatcher>
<requestHandler name="/select" class="solr.SearchHandler">
  <lst name="defaults">
    <str name="defType">edismax</str>
    <str name="echoParams">explicit</str>
    <!-- Phrase-boost and query fields for the edismax parser. -->
    <str name="pf">namebak tags</str>
    <str name="qf">namebak^10.0 tags^9</str>
    <!-- FIX(review): the literal '<' characters in the mm expression made this file
         ill-formed XML (parsers reject it). The min-should-match rule
         "4<-1 8<60%" must be written with escaped '<'. -->
    <str name="mm">4&lt;-1 8&lt;60%</str>
    <!-- Boost function: recency of lastupdate/releasedate plus an averaged
         new/hot/jingdian popularity signal. -->
    <str name="bf">
sum(recip(ms(NOW,lastupdate),1,1,300),recip(ms(NOW,releasedate),1,0.1,100),div(sum(new,hot,jingdian),9))
    </str>
  </lst>
</requestHandler>
<!-- Required for SolrCloud (original comment had typo "SolrClound"): real-time get
     handler, serves uncommitted docs from the update log. -->
<requestHandler name="/get" class="solr.RealTimeGetHandler">
<lst name="defaults">
<str name="omitHeader">true</str>
<str name="wt">json</str>
<str name="indent">true</str>
</lst>
</requestHandler>
<!-- Registers the standard set of admin request handlers (system info, ping, etc.). -->
<requestHandler name="/admin/" class="solr.admin.AdminHandlers" />
<requestHandler name="/query" class="solr.SearchHandler">
  <lst name="defaults">
    <str name="defType">edismax</str>
    <str name="echoParams">explicit</str>
    <str name="wt">json</str>
    <str name="indent">true</str>
    <!-- FIX(review): was <str name="df">namebak tags</str>. The df (default field)
         parameter accepts exactly ONE field name, so "namebak tags" is not a valid
         field and default-field queries would fail. With defType=edismax the
         multi-field search list belongs in qf (matching the /select handler). -->
    <str name="qf">namebak tags</str>
  </lst>
</requestHandler>
<!-- Update endpoint for indexing documents. -->
<requestHandler name="/update" class="solr.UpdateRequestHandler">
<lst name="defaults">
<!-- NOTE(review): "application/document" is not a standard Solr update MIME type
     (typically application/xml or application/json) — confirm clients really send
     this content type before relying on it. -->
<str name="stream.contentType">application/document</str>
</lst>
</requestHandler>
<!-- Field analysis / tokenization debugging endpoints; loaded lazily on first use. -->
<requestHandler name="/analysis/field" startup="lazy"
class="solr.FieldAnalysisRequestHandler" />
<requestHandler name="/analysis/document"
class="solr.DocumentAnalysisRequestHandler" startup="lazy" />
<!-- Legacy master/slave replication: this node acts as a slave pulling the
     core_album index from the master below. Polling interval is commented out,
     so replication presumably runs only on explicit command — TODO confirm. -->
<requestHandler name="/replication" class="solr.ReplicationHandler">
<lst name="slave">
<str name="masterUrl">http://42.96.194.6:8088/solr/core_album/replication
</str>
<!--str name="pollInterval">00:01:00</str -->
<str name="compression">internal</str>
<!-- Connect/read timeouts in milliseconds. -->
<str name="httpConnTimeout">500</str>
<str name="httpReadTimeout">10000</str>
<!-- SECURITY NOTE(review): basic-auth credentials and a public master IP are
     hard-coded in plaintext here; consider moving them to system properties
     (${...}) and restricting access to this file. -->
<str name="httpBasicAuthUser">core_album</str>
<str name="httpBasicAuthPassword">core_album</str>
</lst>
</requestHandler>
<!-- Legacy admin UI setting: the query pre-filled in the admin query form. -->
<admin>
<defaultQuery>*:*</defaultQuery>
</admin>
</config>