1 Initialize the Elasticsearch connection at application startup
private static Logger log = LoggerFactory.getLogger(ElasticsearchUtil.class);
private static TransportClient client;
private static Settings esSettings;
public static void init() {
    // Set the cluster name; adding .put("client.transport.sniff", true) would sniff the cluster state
    // and automatically add the other ES nodes' addresses to the local client's node list.
    esSettings = Settings.builder()
            .put("transport.type", "netty3")
            .put("http.type", "netty3")
            .put("cluster.name", "elasticsearch")
            .build();
    try {
        // Client initialization changed compared with older versions; PreBuiltTransportClient has
        // several overloaded constructors (e.g. for registering plugins).
        client = new PreBuiltTransportClient(esSettings);
        // Add at least one node address here; a single one is enough once sniffing is enabled.
        String es_Ip = PropertiesUtil.getProValueByKey("elk", "es_Ip");
        String es_port = PropertiesUtil.getProValueByKey("elk", "es_port");
        client.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(es_Ip), Integer.parseInt(es_port)));
        log.info("Elasticsearch connection established");
    } catch (UnknownHostException e) {
        log.error("Failed to resolve Elasticsearch host", e);
    }
}
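For reference, a minimal startup hook, assuming the project runs in a servlet container (the listener class name is illustrative):

public class EsStartupListener implements javax.servlet.ServletContextListener {
    @Override
    public void contextInitialized(javax.servlet.ServletContextEvent sce) {
        ElasticsearchUtil.init(); // connect once when the application starts
    }
    @Override
    public void contextDestroyed(javax.servlet.ServletContextEvent sce) {
        // the TransportClient could be closed here if the utility class exposes a close() method
    }
}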
2 Check whether an index exists
public static boolean isExistsIndex(String indexName) {
    IndicesExistsResponse response = client.admin().indices().exists(
            new IndicesExistsRequest().indices(new String[]{indexName})).actionGet();
    return response.isExists();
}
3 Save a single document: create the index (if needed) and store the object as JSON
public static void createIndex(Object info, String index, String type, String id) throws ElasticsearchException, IOException {
    client.prepareIndex(index, type, id).setSource(JsonUtil.toJson(info)).setRefreshPolicy("true").execute().actionGet();
}
4 Bulk save
public static void batchInsertInfo(List<Map<String,Object>> logInfoMap) {
    BulkRequestBuilder bulkRequest = client.prepareBulk();
    for (int i = 0, len = logInfoMap.size(); i < len; i++) {
        Map<String,Object> map = logInfoMap.get(i);
        String index = map.get("index").toString();
        String id = map.get("uuid").toString();
        bulkRequest.add(client.prepareIndex(index, "syslog").setSource(map).setId(id));
    }
    // Skip the request entirely when there is nothing to index.
    if (bulkRequest.numberOfActions() > 0) {
        bulkRequest.execute().actionGet();
    }
}
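Each map in the list must carry its target index under the key "index" and its document id under "uuid"; the whole map becomes the document source. A usage sketch (field names and values are illustrative):

List<Map<String, Object>> batch = new ArrayList<>();
Map<String, Object> doc = new HashMap<>();
doc.put("index", "syslog-2018.06.01");                    // target index
doc.put("uuid", java.util.UUID.randomUUID().toString());  // document id
doc.put("logLevel", "WARN");
doc.put("receivedTime", "2018-06-01 12:00:00");
// note: the "index" and "uuid" entries end up in the stored source too, since the whole map is indexed
batch.add(doc);
ElasticsearchUtil.batchInsertInfo(batch);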
5 Create the mapping for an index; each index is mapped only once and must not be mapped again
public static void createMapping(Class<?> object, String index, String type) throws ElasticsearchException, IOException {
    if (!isExistsIndex(index)) {
        client.admin().indices().prepareCreate(index).execute().actionGet();
        XContentBuilder xcontentBuilder = jsonBuilder().startObject().startObject("properties");
        // Read the class's fields via reflection and map every String field as "keyword".
        // Keyword fields are not analyzed by default in 5.x, so the old "index": "not_analyzed" setting is not needed.
        Field[] fields = object.getDeclaredFields();
        for (Field field : fields) {
            if (field.getType().toString().contains("String")) {
                xcontentBuilder.startObject(field.getName()).field("type", "keyword").endObject();
            }
        }
        xcontentBuilder.endObject().endObject();
        PutMappingRequest mappingRequest = Requests.putMappingRequest(index).type(type).source(xcontentBuilder);
        client.admin().indices().putMapping(mappingRequest).actionGet();
    }
}
// Usage: ElasticsearchUtil.createMapping(LogInfo.class, index, "syslog");
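To make the reflection explicit, here is a hypothetical shape of LogInfo and the mapping it produces; only String fields are picked up, and each becomes a keyword field:

public class LogInfo {
    private String logLevel;
    private String facility;
    private String receivedTime;
    private String logSourceUuid;
    // ... getters and setters omitted
}
// Resulting mapping source (sketch): {"properties": {"logLevel": {"type": "keyword"}, "facility": {"type": "keyword"}, ...}}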
6 Fetch a single document from ES by index, type and id
public static GetResponse getOneInfo(String index, String type, String id) {
    return client.prepareGet(index, type, id).execute().actionGet();
}
7 Delete a single document by index, type and id
public static void delete(String index, String type, String id) {
    client.prepareDelete(index, type, id).execute().actionGet();
    // If the deletion must be visible to searches immediately, set a refresh policy on the request
    // before executing it (see the sketch below) rather than inspecting the response.
}
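If a deletion really must be visible to the next search, a refresh policy can be set on the request itself; a sketch against the 5.x TransportClient API (forcing a refresh per request is expensive under heavy load):

client.prepareDelete(index, type, id)
        .setRefreshPolicy(org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE)
        .execute().actionGet();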
8 Bulk delete
//Each entry has the format index_type_id
public static boolean batchDelete(List<String> index_type_idList) {
    try {
        if (CollectionUtils.isNotEmpty(index_type_idList)) {
            BulkRequestBuilder bulkRequestBuilder = client.prepareBulk().setRefreshPolicy("true");
            for (String index_type_id : index_type_idList) {
                String[] arr = index_type_id.split("_");
                DeleteRequestBuilder deleteRequestBuilder = client.prepareDelete(arr[0], arr[1], arr[2]);
                bulkRequestBuilder.add(deleteRequestBuilder);
            }
            BulkResponse bulkResponse = bulkRequestBuilder.execute().actionGet();
            return !bulkResponse.hasFailures();
        }
    } catch (Exception e) {
        log.error("Bulk delete failed", e);
    }
    return false;
}
9 Delete indices together with all of their documents
public static void deleteIndex(String[] index) {
    client.admin().indices().prepareDelete(index).execute().actionGet();
}
10 List all indices
public static String[] getAllIndex() {
    ClusterStateResponse response = client.admin().cluster().prepareState().execute().actionGet();
    return response.getState().getMetaData().getConcreteAllIndices();
}
11 Create the snapshot (backup) repository at application startup
public static void createSnapshotRepository() {
    try {
        Settings settings = Settings.builder().put("location", "/opt/data/backup/my_backup").build();
        PutRepositoryRequestBuilder putRepo = new PutRepositoryRequestBuilder(client.admin().cluster(), PutRepositoryAction.INSTANCE);
        putRepo.setName(ElasticsearchEnum.ES_REPOSITORYNAME).setType("fs").setSettings(settings).execute().actionGet();
    } catch (Exception e) {
        log.error("Failed to create the snapshot repository", e);
    }
}
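Note that an "fs" repository is only accepted if the node whitelists the location in elasticsearch.yml, e.g. path.repo: ["/opt/data/backup"]. With that in place, the call at startup is simply:

ElasticsearchUtil.createSnapshotRepository(); // registers the "fs" repository pointing at /opt/data/backup/my_backup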
12 Take a snapshot
//Parameters: repository name; indices to back up; snapshot name
public static void createSnapshot(String repositoryName, String[] index, String snapshot) {
    CreateSnapshotRequestBuilder builder = new CreateSnapshotRequestBuilder(client.admin().cluster(), CreateSnapshotAction.INSTANCE);
    builder.setRepository(repositoryName).setIndices(index).setSnapshot(snapshot);
    builder.execute().actionGet();
}
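A usage sketch (the snapshot name is illustrative and must be lowercase; ES_REPOSITORYNAME is assumed to be the repository name registered in section 11):

String[] indicesToBackup = ElasticsearchUtil.getAllIndex();
ElasticsearchUtil.createSnapshot(ElasticsearchEnum.ES_REPOSITORYNAME, indicesToBackup, "snapshot-2018-06-01");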
13 Restore backed-up indices and documents from a snapshot
//Parameters: repository name; snapshot name
public static void restoreSnapshot(String repositoryName, String snapshot) {
    List<String> indices = Lists.newArrayList();
    try {
        GetSnapshotsRequestBuilder builder = new GetSnapshotsRequestBuilder(client.admin().cluster(), GetSnapshotsAction.INSTANCE);
        builder.setRepository(repositoryName);
        builder.setSnapshots(snapshot);
        GetSnapshotsResponse getSnapshotsResponse = builder.execute().actionGet();
        // The indices contained in the snapshot must be closed before they can be restored.
        indices = getSnapshotsResponse.getSnapshots().get(0).indices();
        CloseIndexRequestBuilder closeIndexRequestBuilder = new CloseIndexRequestBuilder(client.admin().indices(), CloseIndexAction.INSTANCE);
        closeIndexRequestBuilder.setIndices(indices.toArray(new String[indices.size()]));
        closeIndexRequestBuilder.execute().actionGet();
    } catch (Exception e) {
        log.error("Index does not exist and cannot be closed: " + ToStringBuilder.reflectionToString(indices.toArray(new String[indices.size()])));
    }
    // Run the restore.
    RestoreSnapshotRequestBuilder restoreBuilder = new RestoreSnapshotRequestBuilder(client.admin().cluster(), RestoreSnapshotAction.INSTANCE);
    restoreBuilder.setRepository(repositoryName).setSnapshot(snapshot);
    restoreBuilder.execute().actionGet();
}
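A matching usage sketch (same illustrative names as above); the restored indices are opened again by Elasticsearch once the restore completes:

ElasticsearchUtil.restoreSnapshot(ElasticsearchEnum.ES_REPOSITORYNAME, "snapshot-2018-06-01");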
14 Rich conditional search
public static List<LogInfo> getAllLogInfo(String[] index, Criteria criteria) throws Exception {
    List<LogInfo> logInfoList = Lists.newArrayList();
    try {
        // Search ES, sorted by receivedTime in descending order.
        SearchRequestBuilder searchRequestBuilder = client.prepareSearch(index);
        // Results are fetched from ES in blocks of 6 front-end pages: map the requested page to its
        // block, then pull 6 * pageSize documents starting at that block's offset.
        criteria.setCurrentPage((criteria.getCurrentPage() + 5) / 6);
        searchRequestBuilder.setFrom(((criteria.getCurrentPage() - 1) * criteria.getPageSize()) * 6)
                .setSize(6 * criteria.getPageSize())
                .addSort("receivedTime", SortOrder.DESC);
        Map<String, Object> condition = criteria.getCondition();
        BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
        if (condition != null && condition.size() > 0) {
            Iterator<Map.Entry<String, Object>> iterator = condition.entrySet().iterator();
            while (iterator.hasNext()) {
                Map.Entry<String, Object> entry = iterator.next();
                if ("logLevel".equals(entry.getKey()) || "facility".equals(entry.getKey())) {
                    QueryBuilder queryBuilder = QueryBuilders.matchQuery(entry.getKey(), entry.getValue());
                    boolQueryBuilder.must(queryBuilder); // AND semantics; use .should() for OR
                } else if ("logSourceUuid".equals(entry.getKey())) {
                    // "_"-separated values are OR-ed together as wildcard matches.
                    String[] split = entry.getValue().toString().split("_");
                    BoolQueryBuilder boolQueryBuilderIp = QueryBuilders.boolQuery();
                    for (int i = 0; i < split.length; i++) {
                        QueryBuilder queryBuilder = QueryBuilders.wildcardQuery(entry.getKey(), "*" + split[i] + "*");
                        boolQueryBuilderIp.should(queryBuilder);
                    }
                    boolQueryBuilder.must(boolQueryBuilderIp);
                } else if ("receivedTime".equals(entry.getKey())) {
                    // Filter by time range; the value is "start#end".
                    String start = entry.getValue().toString().split("#")[0].trim();
                    String end = entry.getValue().toString().split("#")[1].trim();
                    QueryBuilder queryBuilder = QueryBuilders.rangeQuery(entry.getKey()).from(start).to(end);
                    boolQueryBuilder.must(queryBuilder);
                } else if ("rulelist".equals(entry.getKey())) {
                    List<Map<String, String>> values = (List<Map<String, String>>) entry.getValue();
                    for (Map<String, String> maps : values) {
                        StrategyRuleRel rule = JsonUtil.jsonToObject(JsonUtil.map2Json(maps), StrategyRuleRel.class);
                        if (rule.getRuleType() == 1) { // starts with
                            if (rule.getRuleRelation() == 1) {
                                PrefixQueryBuilder prefixQuery = QueryBuilders.prefixQuery(rule.getLogField(), rule.getContent());
                                boolQueryBuilder.must(prefixQuery);
                            } else {
                                boolQueryBuilder.should(QueryBuilders.prefixQuery(rule.getLogField(), rule.getContent()));
                            }
                        } else if (rule.getRuleType() == 2) { // ends with
                            if (rule.getRuleRelation() == 1) {
                                boolQueryBuilder.must(QueryBuilders.wildcardQuery(rule.getLogField(), "*" + rule.getContent()));
                            } else {
                                boolQueryBuilder.should(QueryBuilders.wildcardQuery(rule.getLogField(), "*" + rule.getContent()));
                            }
                        } else if (rule.getRuleType() == 3) { // contains
                            if (rule.getRuleRelation() == 1) {
                                boolQueryBuilder.must(QueryBuilders.wildcardQuery(rule.getLogField(), "*" + rule.getContent() + "*"));
                            } else {
                                boolQueryBuilder.should(QueryBuilders.wildcardQuery(rule.getLogField(), "*" + rule.getContent() + "*"));
                            }
                        } else if (rule.getRuleType() == 5) { // regular expression
                            if (rule.getRuleRelation() == 1) {
                                boolQueryBuilder.must(QueryBuilders.regexpQuery(rule.getLogField(), rule.getContent()));
                            } else {
                                boolQueryBuilder.should(QueryBuilders.regexpQuery(rule.getLogField(), rule.getContent()));
                            }
                        } else if (rule.getRuleType() == 6) { // wildcard
                            if (rule.getRuleRelation() == 1) {
                                boolQueryBuilder.must(QueryBuilders.wildcardQuery(rule.getLogField(), rule.getContent()));
                            } else {
                                boolQueryBuilder.should(QueryBuilders.wildcardQuery(rule.getLogField(), rule.getContent()));
                            }
                        } else if (rule.getRuleType() == 7) { // equals
                            if (rule.getRuleRelation() == 1) {
                                boolQueryBuilder.must(QueryBuilders.termQuery(rule.getLogField(), rule.getContent()));
                            } else {
                                boolQueryBuilder.should(QueryBuilders.termQuery(rule.getLogField(), rule.getContent()));
                            }
                        }
                    }
                } else {
                    QueryBuilder queryBuilder = QueryBuilders.wildcardQuery(entry.getKey(), "*" + entry.getValue().toString() + "*");
                    boolQueryBuilder.must(queryBuilder);
                }
            }
        }
        searchRequestBuilder.setQuery(boolQueryBuilder);
        SearchResponse searchResponse = searchRequestBuilder.execute().actionGet();
        SearchHits hits = searchResponse.getHits();
        SearchHit[] searchHits = hits.getHits();
        if (searchHits.length > 0) {
            for (SearchHit hit : searchHits) {
                // Parse the document source into a map.
                Map<String, Object> objectMap = hit.getSource();
                Iterator<Map.Entry<String, Object>> sourceIterator = objectMap.entrySet().iterator();
                // Populate a LogInfo bean via reflection, field by field.
                LogInfo logInfo = new LogInfo();
                while (sourceIterator.hasNext()) {
                    Map.Entry<String, Object> entry = sourceIterator.next();
                    Field field = LogInfo.class.getDeclaredField(entry.getKey()); // look up the field by name
                    Method method = LogInfo.class.getMethod("set" + StringUtil.capitalize(entry.getKey()), new Class[]{field.getType()});
                    if (field.getType() == Integer.class) {
                        method.invoke(logInfo, Integer.parseInt(entry.getValue().toString()));
                    } else {
                        method.invoke(logInfo, entry.getValue().toString());
                    }
                }
                logInfo.setIndex(hit.getIndex());
                logInfo.setType(hit.getType());
                logInfo.setId(hit.getId());
                logInfoList.add(logInfo);
            }
        }
    } catch (Exception e) {
        log.error("Conditional search failed", e);
    }
    return logInfoList;
}
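A sketch of a condition map this method understands (the keys follow the branches above; Criteria is assumed to expose a setCondition(...) setter matching the getCondition() used here):

Map<String, Object> condition = new HashMap<>();
condition.put("logLevel", "ERROR");                                        // matched with matchQuery
condition.put("logSourceUuid", "192.168.1.10_192.168.1.11");               // "_"-separated values, OR-ed wildcard matches
condition.put("receivedTime", "2018-06-01 00:00:00#2018-06-02 00:00:00");  // "#"-separated start/end range
criteria.setCondition(condition);
List<LogInfo> logs = ElasticsearchUtil.getAllLogInfo(index, criteria);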
15 Aggregation query (statistics)
//Parameters: indices to search; field to aggregate on; start time; end time
public static SearchResponse countLogInfoForCondition(String[] index, String field, String startTime, String endTime) throws Exception {
    // Drop indices that do not exist so the search does not fail.
    List<String> indexTemp = getExistIndex(index);
    if (CollectionUtils.isEmpty(indexTemp)) {
        return null;
    }
    // Aggregation query; also return the ten most recent hits.
    SearchRequestBuilder searchRequestBuilder = client.prepareSearch(indexTemp.toArray(new String[indexTemp.size()]));
    searchRequestBuilder.setFrom(0).setSize(10).addSort("receivedTime", SortOrder.DESC);
    if (field != null) {
        AggregationBuilder aggregationBuilder = AggregationBuilders.terms("aggregation").field(field);
        searchRequestBuilder.addAggregation(aggregationBuilder);
    }
    // Restrict the statistics to the given time range.
    if (startTime != null && endTime != null) {
        searchRequestBuilder.setQuery(QueryBuilders.rangeQuery("receivedTime").from(startTime).to(endTime));
    }
    return searchRequestBuilder.execute().actionGet();
}
//*************** How to call ************************
SearchResponse searchResponse = ElasticsearchUtil.countLogInfoForCondition(index, "eventLevel", startTime, endTime);
if (searchResponse != null) {
    List<SysConfig> sysConfigs = sysConfigService.getSysCnfListByType(9);
    Terms terms = searchResponse.getAggregations().get("aggregation");
    List<Terms.Bucket> buckets = terms.getBuckets();
    for (Terms.Bucket bucket : buckets) {
        System.out.println(bucket.getKey() + "#" + bucket.getDocCount());
    }
}
More official API documentation: https://www.elastic.co/guide/en/elasticsearch/reference