The project integrated Elasticsearch through spring-data-elasticsearch, using the TransportClient on port 9300.
Because the project had to be redeployed in Europe, where the ES instance is the encrypted version provided by Alibaba Cloud, port 9300 could no longer be reached, so we switched to port 9200 (HTTP).
Migration steps:
1. Integrate the REST client, add the configuration, and verify it in a test class;
2. Rework all code that touches ES (a before/after sketch follows this list);
3. Remove all TransportClient-related code, configuration, and dependencies;
4. Migration complete.
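To make step 2 concrete, here is a minimal before/after sketch of a single search call. The class name, index, and field names are illustrative placeholders rather than code from the actual project, and the old TransportClient call is shown only as a comment for comparison:
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import java.io.IOException;

public class SearchMigrationExample {

    private final RestHighLevelClient restHighLevelClient;

    public SearchMigrationExample(RestHighLevelClient restHighLevelClient) {
        this.restHighLevelClient = restHighLevelClient;
    }

    public SearchResponse searchByAppName(String appName) throws IOException {
        // Before (TransportClient over port 9300), kept only as a comment for comparison:
        // return transportClient.prepareSearch("admin_log")
        //         .setQuery(QueryBuilders.termQuery("appName.keyword", appName))
        //         .get();

        // After (RestHighLevelClient over HTTP, port 9200); the query builders themselves are unchanged:
        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder()
                .query(QueryBuilders.termQuery("appName.keyword", appName));
        return restHighLevelClient.search(new SearchRequest("admin_log").source(sourceBuilder));
    }
}
Maven dependencies for the low-level and high-level REST clients: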
<dependency>
    <groupId>org.elasticsearch.client</groupId>
    <artifactId>elasticsearch-rest-client</artifactId>
    <version>6.3.2</version>
</dependency>
<dependency>
    <groupId>org.elasticsearch.client</groupId>
    <artifactId>elasticsearch-rest-high-level-client</artifactId>
    <version>6.3.2</version>
</dependency>
The connection settings in the application's YAML configuration:
elasticSearch:
  host: 10.101.99.189
  port: 9200
  username: elastic
  password: 123456
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
/**
* Elasticsearch high-level REST client configuration
*
* @author GaoJinShan
*/
@Configuration
public class ElasticsearchConfiguration implements FactoryBean<RestHighLevelClient>, InitializingBean, DisposableBean {
private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchConfiguration.class);
@Value("${elasticSearch.host}")
private String host;
@Value("${elasticSearch.port}")
private int port;
@Value("${elasticSearch.username}")
private String username;
@Value("${elasticSearch.password}")
private String password;
private RestHighLevelClient restHighLevelClient;
@Override
public void destroy() throws Exception {
try {
LOGGER.info("Closing elasticSearch client");
if (restHighLevelClient != null) {
restHighLevelClient.close();
}
} catch (final Exception e) {
LOGGER.error("Error closing ElasticSearch client: ", e);
}
}
@Override
public RestHighLevelClient getObject() throws Exception {
return restHighLevelClient;
}
@Override
public Class<RestHighLevelClient> getObjectType() {
return RestHighLevelClient.class;
}
@Override
public boolean isSingleton() {
return false;
}
@Override
public void afterPropertiesSet() throws Exception {
buildClient();
}
protected void buildClient() {
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username, password));
RestClientBuilder builder = RestClient.builder(new HttpHost(host, port))
.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
@Override
public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) {
return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
}
});
restHighLevelClient = new RestHighLevelClient(builder);
}
}
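Step 1 calls for verifying the client in a test class before touching any business code. A minimal connectivity check might look like the sketch below; the class and test names are illustrative, and a JUnit 4 / Spring Boot test setup is assumed:
import org.elasticsearch.client.RestHighLevelClient;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

@RunWith(SpringRunner.class)
@SpringBootTest
public class ElasticsearchClientTest {

    @Autowired
    private RestHighLevelClient restHighLevelClient;

    @Test
    public void pingTest() throws Exception {
        // ping() issues a HEAD request against the cluster root; true means port 9200
        // is reachable and the credentials configured above are accepted
        boolean reachable = restHighLevelClient.ping();
        System.out.println("Elasticsearch reachable: " + reachable);
    }
}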
import org.elasticsearch.action.bulk.*;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.concurrent.TimeUnit;
@Service
public class ElasticSearchUtil {
private static final Logger LOGGER = LoggerFactory.getLogger(ElasticSearchUtil.class);
@Autowired
private RestHighLevelClient restHighLevelClient;
private BulkProcessor bulkProcessor;
@PostConstruct
public void init() {
BulkProcessor.Listener listener = new BulkProcessor.Listener() {
@Override
public void beforeBulk(long executionId, BulkRequest request) {
int numberOfActions = request.numberOfActions();
LOGGER.info("Executing bulk [{}] with {} requests", executionId, numberOfActions);
}
@Override
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
if (response.hasFailures()) {
LOGGER.error("Bulk [{}] executed with failures,response = {}", executionId, response.buildFailureMessage());
} else {
LOGGER.info("Bulk [{}] completed in {} milliseconds", executionId, response.getTook().getMillis());
}
// per-item results are available here if individual failures ever need handling
BulkItemResponse[] responses = response.getItems();
}
@Override
public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
LOGGER.error("Failed to execute bulk", failure);
}
};
this.bulkProcessor = BulkProcessor.builder(restHighLevelClient::bulkAsync, listener)
// flush once 1000 actions have been buffered
.setBulkActions(1000)
// flush once the buffered requests reach 5 MB
.setBulkSize(new ByteSizeValue(5L, ByteSizeUnit.MB))
// number of concurrent flushes: 0 = execute bulks synchronously on the calling thread, 1 = allow one concurrent bulk
.setConcurrentRequests(0)
// flush at least once every second, even if the other thresholds are not reached
.setFlushInterval(TimeValue.timeValueSeconds(1L))
// retry rejected bulk requests up to 5 times, waiting 1 second between attempts
.setBackoffPolicy(BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(1L), 5))
.build();
}
@PreDestroy
public void destroy() {
try {
bulkProcessor.awaitClose(30, TimeUnit.SECONDS);
} catch (InterruptedException e) {
LOGGER.error("Failed to close bulkProcessor", e);
}
LOGGER.info("bulkProcessor closed!");
}
/**
* Buffer an update request in the bulk processor
*
* @param request the update request to add
*/
public void update(UpdateRequest request) {
this.bulkProcessor.add(request);
}
/**
* Buffer an insert (index) request in the bulk processor
*
* @param request the index request to add
*/
public void insert(IndexRequest request) {
this.bulkProcessor.add(request);
}
}
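For completeness, a hypothetical caller of the utility might look like this; the service name, index, type, and field names are placeholders, and only the ElasticSearchUtil API above is assumed:
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.common.xcontent.XContentType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@Service
public class AdminLogWriter {

    @Autowired
    private ElasticSearchUtil elasticSearchUtil;

    public void save(String id, String appName) {
        // buffer an insert; the BulkProcessor flushes it once one of the thresholds above is hit
        elasticSearchUtil.insert(new IndexRequest("admin_log", "doc", id)
                .source(XContentType.JSON, "appName", appName, "esCreateTime", System.currentTimeMillis()));
    }

    public void rename(String id, String newAppName) {
        // buffer a partial update of the same document
        elasticSearchUtil.update(new UpdateRequest("admin_log", "doc", id)
                .doc(XContentType.JSON, "appName", newAppName));
    }
}
The snippets below come from the test class mentioned in step 1; each one assumes the RestHighLevelClient bean from the configuration above is injected.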
@Autowired
private RestHighLevelClient restHighLevelClient;
@Test
public void contextLoads() throws IOException, InvocationTargetException, IllegalAccessException {
//"admin_log"是es中的索引
//SearchRequest是包装查询请求
SearchRequest searchRequest = new SearchRequest("admin_log");
//sourceBuilder is used to build the query conditions
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
//from here on, adding query conditions works exactly as it did with the TransportClient
sourceBuilder.query(QueryBuilders.boolQuery().must(QueryBuilders.termsQuery("appName.keyword", "aaaaaa-mng")));
searchRequest.source(sourceBuilder);
SearchResponse searchResponse = restHighLevelClient.search(searchRequest);
SearchHit[] searchHits = searchResponse.getHits().getHits();
//AdminLog is my own entity class
ArrayList<AdminLog> adminLogs = new ArrayList<>();
//used to hold the converted hits
for (SearchHit hit : searchHits) {
Map<String, Object> sourceAsMap = hit.getSourceAsMap();
AdminLog adminLog = new AdminLog();
//copy the map onto the entity (the keys must match the property names exactly)
BeanUtils.populate(adminLog, sourceAsMap);
adminLogs.add(adminLog);
}
}
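If the listing also needs paging and sorting, the same SearchSourceBuilder carries them before searchRequest.source(sourceBuilder) is called. A small sketch follows; the helper class and the esCreateTime sort field are assumptions for illustration, not project code:
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;

public final class SearchPaging {

    private SearchPaging() {
    }

    // adds simple offset paging and newest-first sorting to an existing query
    public static SearchSourceBuilder page(SearchSourceBuilder sourceBuilder, int pageIndex, int pageSize) {
        return sourceBuilder
                .from(pageIndex * pageSize)            // offset of the first hit to return
                .size(pageSize)                        // number of hits per page
                .sort("esCreateTime", SortOrder.DESC); // newest entries first
    }
}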
@Autowired
private RestHighLevelClient restHighLevelClient;
@Test
public void aggsSearch() {
SearchRequest searchRequest = new SearchRequest("admin_log");
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
searchRequest.source(sourceBuilder);
sourceBuilder.aggregation(AggregationBuilders.terms("xxx_NAME").field("appName.keyword").size(10000));
try {
SearchResponse searchResponse = restHighLevelClient.search(searchRequest);
Terms aggregationResult = searchResponse.getAggregations().get("xxx_NAME");
List<? extends Terms.Bucket> buckets = aggregationResult.getBuckets();
for (Terms.Bucket tb : buckets) {
System.out.println(tb.getKey());
}
} catch (IOException e) {
e.printStackTrace();
}
}
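Each Terms.Bucket also exposes its document count, so the loop above could report how many log entries each appName has; a small variation of that loop, using only methods available on the bucket:
for (Terms.Bucket tb : buckets) {
    // bucket key (the appName value) plus the number of documents that fall into it
    System.out.println(tb.getKeyAsString() + " -> " + tb.getDocCount());
}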
@Autowired
private RestHighLevelClient restHighLevelClient;
@Test
public void restClientTest() throws IOException {
Map<String, String> params = Collections.emptyMap();
String queryString = "{\"sort\": [\n" +
" {\n" +
" \"esCreateTime\": {\n" +
" \"order\": \"desc\"\n" +
" }\n" +
" }\n" +
"]}";
HttpEntity entity = new NStringEntity(queryString, ContentType.APPLICATION_JSON);
//the high-level REST client is built on top of the low-level one; getLowLevelClient() exposes it when you need to send a raw request
Response response = restHighLevelClient.getLowLevelClient().performRequest("GET", "/admin_log/_search", params, entity);
}
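The Response returned by the low-level client carries the raw JSON body, which can be read with the Apache HttpClient utilities already on the classpath; for example, right after the performRequest call above:
import org.apache.http.util.EntityUtils;

// read and print the raw JSON search result returned by the low-level client
String responseBody = EntityUtils.toString(response.getEntity());
System.out.println(responseBody);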
Note: the restHighLevelClient configuration in this article is adapted from https://my.oschina.net/u/3795437/blog/3022874