Implementing multi-field grouped aggregation and paging with Elasticsearch in Java

 

Elasticsearch itself does not provide a way to page through the buckets of a multi-field (nested terms) aggregation, so paging of the grouped results has to be implemented in application code.

Here is the code:

package com.qq.test;

import com.qq.entity.User;
import org.apache.commons.beanutils.BeanUtils;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.WildcardQueryBuilder;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.junit.Test;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;

/**
 * @author peixiaopeng
 * @date 19-5-9 - 10:02 AM
 */
public class EsGroupPage {

    @Test
    public void testFindCusMap() {
        try {
            Settings settings = Settings.builder().put("cluster.name", "elasticsearch").build();
            // create the transport client
            TransportClient client = new PreBuiltTransportClient(settings).addTransportAddress(new TransportAddress(InetAddress.getByName("192.168.0.105"), 9300));

            QueryBuilder query = QueryBuilders.wildcardQuery("name", "*");
            int pageTotal = 0;  // total number of (address, age, birthday) groups, filled in by the first pass
            int pageNum = 3;    // requested page number (1-based)
            int pageSize = 3;   // records per page
            if (pageTotal <= 0) {
                // first pass: run the nested terms aggregation only to count how many groups exist
                TermsAggregationBuilder birthdayTerm = AggregationBuilders.terms("addr_count").field("address.keyword")
                        .subAggregation(AggregationBuilders.terms("age_count").field("age").subAggregation(AggregationBuilders.terms("bir_count").field("birthday")));
                // note: this cardinality builder is never added to the request below; cardinality("total") on "birthday"
                // would only count distinct birthdays, not distinct (address, age, birthday) groups
                CardinalityAggregationBuilder builder = AggregationBuilders.cardinality("total").field("birthday");
                SearchRequestBuilder requestBuilder = client.prepareSearch("lib3").setQuery(query).addAggregation(birthdayTerm);
                Aggregations aggregations = requestBuilder.get().getAggregations();
                Terms terms = aggregations.get("addr_count");
                long sum = 0; // counts the innermost (birthday) buckets, i.e. the total number of groups
                for (Terms.Bucket entry:terms.getBuckets()){

                    Terms terms1 = entry.getAggregations().get("age_count");
                    for (Terms.Bucket entry1:terms1.getBuckets()){
                        Terms terms2 = entry1.getAggregations().get("bir_count");
                        for (Terms.Bucket entry2:terms2.getBuckets()){
//                            System.out.println("key--3-"+entry2.getKey()+"----value---"+entry2.getDocCount());
                            sum++;
                        }
                    }
//                    System.out.println(sum);
                }
                pageTotal = (int) sum; // remember the total so the number of pages could be computed
            }
            // second pass: build the nested terms aggregation again for the actual page query
            TermsAggregationBuilder addrTerms = AggregationBuilders.terms("addr_count").field("address.keyword");
            TermsAggregationBuilder ageTerms = AggregationBuilders.terms("age_count").field("age");
            addrTerms.subAggregation(ageTerms.subAggregation(AggregationBuilders.terms("bir_count").field("birthday")));
            int startIndex = (pageNum - 1) * pageSize; // index of the first record on the requested page
            int endIndex = pageNum * pageSize;         // index just past the last record on the requested page
            if (pageSize > 0) {
                // return enough top-level address buckets to cover the requested page
                addrTerms.size(endIndex);
            }
            SearchRequestBuilder builder1 = client.prepareSearch("lib3").setQuery(query).addAggregation(addrTerms);
//            builder1.addSort("address.keyword", SortOrder.ASC);
//            builder1.addSort("age",SortOrder.ASC);
            SearchResponse response = builder1.get();
            Terms addrResult = response.getAggregations().get("addr_count");
            List<User> dataList = new ArrayList<>();
            int i = 0; // running index over the flattened (address, age, birthday) groups
            for (Terms.Bucket addrEntry:addrResult.getBuckets()){
                User user = new User();
                System.out.println("addr---"+addrEntry.getKeyAsString());
                user.setAddress(addrEntry.getKeyAsString());

                Terms ageResult = addrEntry.getAggregations().get("age_count");
                for (Terms.Bucket ageEntry:ageResult.getBuckets()){
                    System.out.println("age---"+ageEntry.getKeyAsString());
                    user.setAge(ageEntry.getKeyAsNumber().longValue());


                    Terms birResult = ageEntry.getAggregations().get("bir_count");
                    for (Terms.Bucket birEntry:birResult.getBuckets()){
                        if (i++ < startIndex) { // skip groups that belong to earlier pages
                            continue;
                        }
                        if (i > endIndex) { // requested page is full (only breaks the innermost loop)
                            break;
                        }
                        System.out.println("bir---" + birEntry.getKeyAsString());
                        user.setBirthday(birEntry.getKeyAsString());
                        user.setCount(birEntry.getDocCount());
                        User user1 = new User();
                        // copy into a fresh object so entries already added to dataList keep their values;
                        // the first argument of BeanUtils.copyProperties is the destination object
                        BeanUtils.copyProperties(user1, user);
                        dataList.add(user1);
                    }
                }
            }

            System.out.println(dataList);

        } catch (Exception e) {
            e.printStackTrace();
        }
    }


}
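
The listing above imports com.qq.entity.User, but the original post does not show that class. Below is a minimal sketch of what it presumably looks like, reconstructed from the setters the code calls (setAddress, setAge, setBirthday, setCount); treat it as an assumption for completeness, not the author's actual entity. The matching getters are included because BeanUtils.copyProperties needs them.

package com.qq.entity;

// Hypothetical reconstruction of the User entity used by EsGroupPage above,
// inferred from the setters it calls; field types follow the bucket values
// (address/birthday keys as String, age key and doc count as long).
public class User {

    private String address;   // key of the "addr_count" bucket
    private long age;         // key of the "age_count" bucket
    private String birthday;  // key of the "bir_count" bucket
    private long count;       // doc count of the innermost bucket

    public String getAddress() { return address; }
    public void setAddress(String address) { this.address = address; }

    public long getAge() { return age; }
    public void setAge(long age) { this.age = age; }

    public String getBirthday() { return birthday; }
    public void setBirthday(String birthday) { this.birthday = birthday; }

    public long getCount() { return count; }
    public void setCount(long count) { this.count = count; }

    @Override
    public String toString() {
        return "User{address='" + address + "', age=" + age
                + ", birthday='" + birthday + "', count=" + count + "}";
    }
}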

You can run the following query in the Kibana console to check whether the groups produced by the Java code are correct:

GET /lib3/user/_search
{
  "size": 0, # do not return document hits, only the aggregation results
  "aggs": {
    "addr_count": {
      "terms": {
        "field": "address.keyword"
      },
      "aggs": {
        "age_count": {
          "terms": {
            "field": "age"
          },
          "aggs": {
            "birthday_count": {
              "terms": {
                "field": "birthday"
              }
            }
          }
        }
      }
    }
  }
}

Note: if the aggregation involves a date field, it is built slightly differently (Elasticsearch can bucket a date field by year, month, day, or hour); see the blog post linked below. The key call is dateHistogram().field().dateHistogramInterval(new DateHistogramInterval("1M")), which buckets by month ("1M" means one month; lower-case "1m" would mean one minute).

 DateHistogramAggregationBuilder dateHistogramInterval = AggregationBuilders.dateHistogram("lastEndDateInterval").field("lastEndDate").dateHistogramInterval(new DateHistogramInterval("1M"));
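
For reference, here is a minimal sketch (not from the linked post) of how such a monthly date histogram can be built and read back with the same transport-client API. The class name DateHistogramDemo is made up, and the index "lib3", the field "lastEndDate" and the client are assumptions carried over from the snippets above:

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;

public class DateHistogramDemo {

    // "client" is a TransportClient built the same way as in EsGroupPage above
    public static void printMonthlyBuckets(TransportClient client) {
        // bucket documents by month on the "lastEndDate" field
        DateHistogramAggregationBuilder monthAgg = AggregationBuilders
                .dateHistogram("lastEndDateInterval")
                .field("lastEndDate")
                .dateHistogramInterval(DateHistogramInterval.MONTH); // equivalent to new DateHistogramInterval("1M")

        SearchResponse response = client.prepareSearch("lib3")
                .setSize(0)                // only the aggregation is needed, no document hits
                .addAggregation(monthAgg)
                .get();

        // read the buckets back: one bucket per month, keyed by the month's start date
        Histogram histogram = response.getAggregations().get("lastEndDateInterval");
        for (Histogram.Bucket bucket : histogram.getBuckets()) {
            System.out.println(bucket.getKeyAsString() + " -> " + bucket.getDocCount());
        }
    }
}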


 

https://blog.csdn.net/jiahao1186/article/details/82887173

 

 
