java使用elasticsearch-rest-high-level-client操作ElasticSearch-从数据库中批量导入数据

1、创建spring boot 项目,pom.xml如下:


    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.0.1.RELEASE</version>
        <relativePath/>
    </parent>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <java.version>1.8</java.version>
    </properties>

    <dependencies>

        <dependency>
            <groupId>org.mybatis.spring.boot</groupId>
            <artifactId>mybatis-spring-boot-starter</artifactId>
            <version>1.3.2</version>
        </dependency>

        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <scope>runtime</scope>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>org.elasticsearch</groupId>
            <artifactId>elasticsearch</artifactId>
            <version>6.2.1</version>
        </dependency>

        <dependency>
            <groupId>org.elasticsearch.client</groupId>
            <artifactId>elasticsearch-rest-high-level-client</artifactId>
            <version>6.2.1</version>
        </dependency>

        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
        </dependency>

        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.44</version>
        </dependency>

    </dependencies>

2、配置文件:application.yml

spring:
  datasource:
    driver-class-name: com.mysql.jdbc.Driver
    username: 用户名
    password: 密码
    url: mysql地址

3、创建实体类

package com.xuan.search.domain;

import java.util.Date;

/**
 * FileName: WordPracticeFile
 * Author:   xuan zongjun
 * Date:     2020/5/20 10:36
 * Description:
 */


/**
 * Row model for the {@code word_practice_file} table: one user's practice
 * record for a single word. Plain mutable JavaBean used by MyBatis result
 * mapping and by the ES import job.
 */
public class WordPracticeFile {

    // Primary key
    private Integer id;
    // Owning user's id
    private String userId;
    // The word being practiced
    private String word;
    // Memory weighting value
    private Double memoryWeight;
    // Historical memory weighting value
    private Double memoryWeightHistory;
    // Historical memory value
    private Double memoryValueHistory;
    // Initial weight
    private Integer initMemoryWeight;
    // Number of study sessions so far
    private Integer learnTheNumber;
    // Time of the current study session
    private Date currentLearnTime;
    // Time of the next scheduled study session
    private Date nextLearnTime;
    // Writing weighting value
    private Double writeWeight;
    // Reading weighting value
    private Double readWeight;
    // Pronunciation weighting value
    private Double pronunciationWeight;
    // Time the weight was last increased
    private Date addWeightLastTime;
    // Row creation time
    private Date createTime;
    // Row last-modified time
    private Date updateTime;
    // Memory value
    private Double memoryValue;
    // Vocabulary type: 1 consolidated, 2 proficient, 3 impression, 4 fuzzy, 5 forgotten
    private Integer type;
    // Competition resource bundle id
    private Integer competitionResourceBundleId;

    // Accessors were elided in the original ("get/set省略"); they are required
    // by the mapper's property mapping and by the ES import code, so they are
    // generated here in full.

    public Integer getId() { return id; }

    public void setId(Integer id) { this.id = id; }

    public String getUserId() { return userId; }

    public void setUserId(String userId) { this.userId = userId; }

    public String getWord() { return word; }

    public void setWord(String word) { this.word = word; }

    public Double getMemoryWeight() { return memoryWeight; }

    public void setMemoryWeight(Double memoryWeight) { this.memoryWeight = memoryWeight; }

    public Double getMemoryWeightHistory() { return memoryWeightHistory; }

    public void setMemoryWeightHistory(Double memoryWeightHistory) { this.memoryWeightHistory = memoryWeightHistory; }

    public Double getMemoryValueHistory() { return memoryValueHistory; }

    public void setMemoryValueHistory(Double memoryValueHistory) { this.memoryValueHistory = memoryValueHistory; }

    public Integer getInitMemoryWeight() { return initMemoryWeight; }

    public void setInitMemoryWeight(Integer initMemoryWeight) { this.initMemoryWeight = initMemoryWeight; }

    public Integer getLearnTheNumber() { return learnTheNumber; }

    public void setLearnTheNumber(Integer learnTheNumber) { this.learnTheNumber = learnTheNumber; }

    public Date getCurrentLearnTime() { return currentLearnTime; }

    public void setCurrentLearnTime(Date currentLearnTime) { this.currentLearnTime = currentLearnTime; }

    public Date getNextLearnTime() { return nextLearnTime; }

    public void setNextLearnTime(Date nextLearnTime) { this.nextLearnTime = nextLearnTime; }

    public Double getWriteWeight() { return writeWeight; }

    public void setWriteWeight(Double writeWeight) { this.writeWeight = writeWeight; }

    public Double getReadWeight() { return readWeight; }

    public void setReadWeight(Double readWeight) { this.readWeight = readWeight; }

    public Double getPronunciationWeight() { return pronunciationWeight; }

    public void setPronunciationWeight(Double pronunciationWeight) { this.pronunciationWeight = pronunciationWeight; }

    public Date getAddWeightLastTime() { return addWeightLastTime; }

    public void setAddWeightLastTime(Date addWeightLastTime) { this.addWeightLastTime = addWeightLastTime; }

    public Date getCreateTime() { return createTime; }

    public void setCreateTime(Date createTime) { this.createTime = createTime; }

    public Date getUpdateTime() { return updateTime; }

    public void setUpdateTime(Date updateTime) { this.updateTime = updateTime; }

    public Double getMemoryValue() { return memoryValue; }

    public void setMemoryValue(Double memoryValue) { this.memoryValue = memoryValue; }

    public Integer getType() { return type; }

    public void setType(Integer type) { this.type = type; }

    public Integer getCompetitionResourceBundleId() { return competitionResourceBundleId; }

    public void setCompetitionResourceBundleId(Integer competitionResourceBundleId) { this.competitionResourceBundleId = competitionResourceBundleId; }
}

4、Mapper方法

package com.xuan.search.dao;

import com.xuan.search.domain.WordPracticeFile;
import org.apache.ibatis.annotations.*;

import java.util.List;

@Mapper
public interface WordPracticeFileMapper {

    /**
     * Reads one page of practice records.
     *
     * @param start row offset of the page
     * @param size  maximum number of rows in the page
     * @return the requested page, empty when the offset is past the end
     */
    @Select( "select * from word_practice_file limit #{start},#{size}" )
    @Results(id="wordPracticeFileMap",value={
            @Result(property ="userId" ,column ="user_id" ),
            @Result(property ="word" ,column ="word" ),
            @Result(property ="memoryWeight" ,column ="memory_weight" ),
            @Result(property ="initMemoryWeight" ,column ="init_memory_weight" ),
            @Result(property ="memoryWeightHistory" ,column ="memory_weight_history" ),
            @Result(property ="memoryValueHistory" ,column ="memory_value_history" ),
            @Result(property ="learnTheNumber" ,column ="learn_the_number" ),
            @Result(property ="currentLearnTime" ,column ="current_learn_time" ),
            @Result(property ="nextLearnTime" ,column ="next_learn_time" ),
            @Result(property ="writeWeight" ,column ="write_weight" ),
            @Result(property ="readWeight" ,column ="read_weight" ),
            @Result(property ="pronunciationWeight" ,column ="pronunciation_weight" ),
            @Result(property ="createTime" ,column ="create_time" ),
            @Result(property ="updateTime" ,column ="update_time" ),
            // These snake_case columns cannot auto-map to their camelCase
            // properties and were missing from the original mapping:
            @Result(property ="memoryValue" ,column ="memory_value" ),
            @Result(property ="addWeightLastTime" ,column ="add_weight_last_time" ),
            @Result(property ="competitionResourceBundleId" ,column ="competition_resource_bundle_id" ),
    })
    List<WordPracticeFile> getByUserIdAndWord(@Param( "start" ) Integer start, @Param( "size" )Integer size);

    /**
     * @return total number of rows in word_practice_file (used to drive paging)
     */
    @Select( "select count(*) from word_practice_file " )
    int getCount();
}

5、测试代码

package com.xuan.test;

import com.alibaba.fastjson.JSON;
import com.xuan.search.SearchApplication;
import com.xuan.search.dao.WordPracticeFileMapper;
import com.xuan.search.domain.WordPracticeFile;
import org.apache.http.HttpHost;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.*;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * FileName: TestES
 * Author:   xuan zongjun
 * Date:     2020/5/11 10:20
 * Description:
 */

@SpringBootTest(classes = SearchApplication.class)
@RunWith(SpringRunner.class)
public class TestES {

    @Autowired
    private WordPracticeFileMapper wordPracticeFileMapper;

    private RestHighLevelClient client = null;


    @Before
    public void prepare() {
        // 创建Client连接对象
        //String[] ips = {"127.0.0.1:9200"};
        String[] ips = {"47.104.193.89:9200", "47.104.180.62:9200"};
        HttpHost[] httpHosts = new HttpHost[ips.length];
        for (int i = 0; i < ips.length; i++) {
            httpHosts[i] = HttpHost.create( ips[i] );
        }
        RestClientBuilder builder = RestClient.builder( httpHosts );
        client = new RestHighLevelClient( builder );
    }

    @Test
    public void testWordPracticeFile() {
        List wordPracticeFileList = wordPracticeFileMapper.getByUserIdAndWord( 0, 10 );
        System.out.println( wordPracticeFileList.size() );
        System.out.println( wordPracticeFileList.get( 0 ).getUserId() );
    }

/**
     * 创建单词档案表的 索引 和 映射
     *
     * @throws IOException
     */
    @Test
    public void createIndexByWordProfile() throws IOException {

        //创建名称为blog2的索
        CreateIndexRequest request = new CreateIndexRequest( "word_profile" );

        //设置映射 practice 为type名称
        request.mapping( "practice", " {\n" +
                " \t\"properties\": {\n" +
                "           \"user_id\": {\n" +
                "              \"type\": \"keyword\",\n" +
                "              \"index\":\"true\",\n" +
                "              \"store\":\"true\"\n" +
                "           },\n" +
                "           \"word\": {\n" +
                "              \"type\": \"keyword\",\n" +
                "              \"index\":\"true\",\n" +
                "              \"store\":\"true\"\n" +
                "           },\n" +
                "           \"memory_weight\": {\n" +
                "              \"type\": \"float\"\n" +
                "           },\n" +
                "           \"memory_weight_history\": {\n" +
                "              \"type\": \"float\"\n" +
                "           },\n" +
                "           \"init_memory_weight\": {\n" +
                "             \"type\": \"integer\"\n" +
                "           },\n" +
                "           \"memory_value_history\": {\n" +
                "             \"type\": \"float\"\n" +
                "           },\n" +
                "           \"learn_the_number\": {\n" +
                "             \"type\": \"integer\"\n" +
                "           },\n" +
                "           \"current_learn_time\": {\n" +
                "             \"type\": \"date\",\n" +
                "             \"format\": \"yyyy-MM-dd HH:mm:ss\"\n" +
                "           },\n" +
                "           \"next_learn_time\": {\n" +
                "             \"type\": \"date\",\n" +
                "             \"format\": \"yyyy-MM-dd HH:mm:ss\"\n" +
                "           },\n" +
                "           \"write_weight\": {\n" +
                "             \"type\": \"float\"\n" +
                "           },\n" +
                "           \"read_weight\": {\n" +
                "             \"type\": \"float\"\n" +
                "           },\n" +
                "           \"pronunciation_weight\": {\n" +
                "             \"type\": \"float\"\n" +
                "           },\n" +
                "           \"competition_resource_bundle_id\": {\n" +
                "             \"type\": \"integer\"\n" +
                "           }\n" +
                "        }\n" +
                "}", XContentType.JSON );


        CreateIndexResponse createIndexResponse = client.indices().create( request );
        System.out.println( JSON.toJSONString( createIndexResponse ) );

        //释放资源
        client.close();
    }


 /**
     * 把单词档案的历史数据插入到ES中  -- 批量插入
     */
    @Test
    public void testAddBulkRequest() throws IOException {

        int count = wordPracticeFileMapper.getCount();
        List> list =null;
        for (int i = 0; i < count; i++) {
            List wordPracticeFileList = wordPracticeFileMapper.getByUserIdAndWord( i, 1000 );
            System.out.println( i );

            SimpleDateFormat simpleDateFormat = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss" );

            list= new ArrayList<>(  );
            for (WordPracticeFile wordPracticeFile : wordPracticeFileList) {

                //准备json数据
                Map jsonMap = new HashMap<>();
                jsonMap.put( "id", wordPracticeFile.getId() );
                jsonMap.put( "user_id", wordPracticeFile.getUserId() );
                jsonMap.put( "word", wordPracticeFile.getWord() );
                jsonMap.put( "memory_weight", wordPracticeFile.getMemoryWeight() );
                jsonMap.put( "memory_weight_history", wordPracticeFile.getMemoryWeightHistory() );
                jsonMap.put( "init_memory_weight", wordPracticeFile.getInitMemoryWeight() );
                jsonMap.put( "memory_value_history", wordPracticeFile.getMemoryValueHistory() );
                jsonMap.put( "learn_the_number", wordPracticeFile.getLearnTheNumber() );
                jsonMap.put( "current_learn_time", simpleDateFormat.format( wordPracticeFile.getCurrentLearnTime() ) );
                jsonMap.put( "next_learn_time", simpleDateFormat.format( wordPracticeFile.getNextLearnTime() ) );
                jsonMap.put( "write_weight", wordPracticeFile.getWriteWeight() );
                jsonMap.put( "read_weight", wordPracticeFile.getReadWeight() );
                jsonMap.put( "competition_resource_bundle_id", wordPracticeFile.getCompetitionResourceBundleId() );


                list.add( jsonMap );

                //索引请求对象
                IndexRequest indexRequest = new IndexRequest( "word_profile", "practice" ).id( String.valueOf( wordPracticeFile.getId() ) );
                //指定索引文档内容
                indexRequest.source( jsonMap );

            }

            bulkPutIndex(list,"word_profile","practice");
            i+=999;
        }

    }
    //批量插入
    private void bulkPutIndex(List> list,String index,String type) throws IOException {

        int size = list.size();
        BulkRequest request = new BulkRequest();
        for (int i = 0; i < size; i++) {
            Map map = list.get( i );
            //这里必须每次都使用new IndexRequest(index,type),不然只会插入最后一条记录(这样插入不会覆盖已经存在的Id,也就是不能更新)
            //request.add(new IndexRequest(index,type).opType("create").id(map.remove("id").toString()).source(map));
            request.add( new IndexRequest( index, type ,String.valueOf( map.get( "id" ) )).source( map, XContentType.JSON ) );
        }

        client.bulk( request );
    }

}

 

你可能感兴趣的:(java,elasticsearch)