A Simple Example of Writing and Reading Data with Redis

As an example, we will write dictionary data into Redis and then query it back by dictionary type.

1. Query the dictionary data and write it into Redis

    @Resource
    private TestMapper testMapper;

    @Resource
    ShardingRedisUtil shardingRedisUtil;

    @Override
    public void testSetRedis() {
        // fetch all dictionary entries
        List<TbSysDictInfo> list = testMapper.selectAll();
        // group the entries by dictionary type code
        Map<String, List<TbSysDictInfo>> distMap = list.stream()
                .collect(Collectors.groupingBy(TbSysDictInfo::getTypeCode));
        // write the grouped data into Redis (sharded hash)
        shardingRedisUtil.hMSet(Dictconstants.DICT_RESULT, distMap);
    }
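
The snippet above references a dictionary entity and a MyBatis-style mapper that are not shown in the article. Here is a minimal sketch of what they are assumed to look like; only getTypeCode() is actually required by the grouping above, the other fields are illustrative guesses.

import java.util.List;

// Assumed dictionary entity (field names other than typeCode are illustrative)
public class TbSysDictInfo {

    /** dictionary type code, e.g. "SEX" (hypothetical value) */
    private String typeCode;
    /** dictionary item value (illustrative field) */
    private String dictValue;
    /** dictionary item label (illustrative field) */
    private String dictLabel;

    public String getTypeCode() { return typeCode; }
    public void setTypeCode(String typeCode) { this.typeCode = typeCode; }
    public String getDictValue() { return dictValue; }
    public void setDictValue(String dictValue) { this.dictValue = dictValue; }
    public String getDictLabel() { return dictLabel; }
    public void setDictLabel(String dictLabel) { this.dictLabel = dictLabel; }
}

// Assumed MyBatis mapper used in testSetRedis() (in its own file)
interface TestMapper {
    List<TbSysDictInfo> selectAll();
}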

The Dictconstants constant class:

public class Dictconstants {

    /**
     * Redis key prefix for dictionary data
     */
    public static final String DICT_RESULT = "dict_result_";

}

2. Query data from Redis by dictionary type

    @Override
    public List<TbSysDictInfo> testGetRedis(String type) {
        // look up the list for the given dictionary type
        Map<String, List<TbSysDictInfo>> distMap = shardingRedisUtil.hMGet(
                new TypeReference<List<TbSysDictInfo>>() {}, Dictconstants.DICT_RESULT, type);
        if (distMap.get(type) != null) {
            return distMap.get(type);
        } else {
            return new ArrayList<>();
        }
    }
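
Note that hMGet does not scan every shard here: it hashes the requested field (the dictionary type) to locate a single shard key of the form key$N and reads only that hash, mirroring separateToMap in ShardingRedisUtil below. A quick way to see which shard a given type lands in (the type code "SEX" is just a made-up example):

    String type = "SEX"; // hypothetical dictionary type code
    int shard = Math.abs(type.hashCode() % 100); // SHARDING_COUNT defaults to 100
    String redisKey = String.format("%s$%s", Dictconstants.DICT_RESULT, shard);
    // the list for this type is stored as field "SEX" inside the hash "dict_result_$<shard>"
    System.out.println(redisKey);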

3. The Redis utility class ShardingRedisUtil

Rather than putting everything into one big hash, this utility shards a logical hash across multiple Redis keys of the form key$0 ... key$99 (SHARDING_COUNT defaults to 100), hashing each field name to pick its shard. It also wraps common string, list, and set operations, so it is worth reading through in full.


import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.redis.core.Cursor;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.ScanOptions;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;

import javax.annotation.Resource;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

@Component
public class ShardingRedisUtil {

    Logger logger = LoggerFactory.getLogger(this.getClass());

    @Resource
    private RedisTemplate<String, Object> redisTemplate;

    private static final int SHARDING_COUNT = 100; // number of hash shards per logical key
    private static final int SCAN_COUNT = 1000; // batch size hint for HSCAN / list reads
    private ObjectMapper objectMapper = new ObjectMapper();

    public void hMSet(String key, Map<String, ?> map) {
        hMSet(key, map, SHARDING_COUNT);
    }

    public <T> Map<String, T> hMGet(TypeReference<T> typeReference, String key, String... field) {
        return hMGet(typeReference, SHARDING_COUNT, key, field);
    }

    public <T> Map<String, T> hMGet(TypeReference<T> typeReference, String key, List<String> fields) {
        if (CollectionUtils.isEmpty(fields)) {
            return null;
        }
        return hMGet(typeReference, key, fields.toArray(new String[0]));
    }

    public long hDel(String key, String... field) {
        return hDel(SHARDING_COUNT, key, field);
    }

    public void hMSet(String key, Map<String, ?> map, int shardingCount) {
        if (!CollectionUtils.isEmpty(map)) {
            // bucket the fields into shard keys of the form "<key>$<num>"
            Map<String, Map<String, Object>> tmpMap = new HashMap<>();
            for (Map.Entry<String, ?> entry : map.entrySet()) {
                String tmpKey = entry.getKey();
                Object tmpValue = entry.getValue();
                int num = Math.abs(tmpKey.hashCode() % shardingCount);
                String hashKey = String.format("%s$%s", key, num);
                tmpMap.computeIfAbsent(hashKey, k -> new HashMap<>()).put(tmpKey, tmpValue);
            }
            // one HMSET per shard
            for (Map.Entry<String, Map<String, Object>> entry : tmpMap.entrySet()) {
                redisTemplate.opsForHash().putAll(entry.getKey(), entry.getValue());
            }
        }
    }

    public void hMSetToOneGroup(String key, Map<String, Object> map) {
        if (!CollectionUtils.isEmpty(map)) {
            redisTemplate.opsForHash().putAll(key, map);
        }
    }

    public <T> Map<String, T> hMGetfromOneGroup(TypeReference<T> typeReference, String key, String... field) {
        Map<String, T> resultMap = new HashMap<>();
        if (field != null) {
            List<Object> objects = redisTemplate.opsForHash().multiGet(key, new ArrayList<>(Arrays.asList(field)));
            List<String> fieldList = new ArrayList<>();
            Collections.addAll(fieldList, field);
            for (int i = 0; i < fieldList.size(); i++) {
                Object o = objects.get(i);
                resultMap.put(fieldList.get(i), o != null ? objectMapper.convertValue(o, typeReference) : null);
            }
        }
        return resultMap;
    }

    public <T> Map<String, T> hMGet(TypeReference<T> typeReference, int shardingCount, String key, String... field) {
        Map<String, T> resultMap = new HashMap<>();
        if (field != null) {
            // fields are looked up shard by shard, then merged into one result map
            for (Map.Entry<String, List<String>> entry : separateToMap(shardingCount, key, field).entrySet()) {
                List<String> fieldList = entry.getValue();
                List<Object> objects = redisTemplate.opsForHash().multiGet(entry.getKey(), new ArrayList<>(fieldList));
                for (int i = 0; i < fieldList.size(); i++) {
                    Object o = objects.get(i);
                    resultMap.put(fieldList.get(i), o != null ? objectMapper.convertValue(o, typeReference) : null);
                }
            }
        }
        return resultMap;
    }

    /**
     * Scan every shard under the given key prefix and return all cached entries.
     *
     * @param typeReference target type of each value
     * @param keyPattern    logical key prefix
     * @param <T>           value type
     * @return field -> value map merged from all shards
     */
    public <T> Map<String, T> hMGet(TypeReference<T> typeReference, String keyPattern) {
        return hMGet(typeReference, SHARDING_COUNT, keyPattern);
    }

    /**
     * Scan every shard under the given key prefix and return all cached entries.
     *
     * @param typeReference target type of each value
     * @param shardingCount number of shards the data was written with
     * @param keyPattern    logical key prefix
     * @param <T>           value type
     * @return field -> value map merged from all shards
     */
    public <T> Map<String, T> hMGet(TypeReference<T> typeReference, int shardingCount, String keyPattern) {
        Map<String, T> resultMap = new HashMap<>();
        if (StringUtils.isNoneBlank(keyPattern)) {
            try {
                for (int num = 0; num < shardingCount; num++) {
                    String hashKey = String.format("%s$%s", keyPattern, num);
                    resultMap.putAll(hMGetTargetKey(typeReference, hashKey));
                }
            } catch (Exception e) {
                logger.error("Failed to scan redis hash shards for key pattern " + keyPattern, e);
            }
        }
        return resultMap;
    }

    /**
     * Scan a single hash key and return all of its entries.
     *
     * @param typeReference target type of each value
     * @param key           concrete hash key (already including the shard suffix)
     * @param <T>           value type
     * @return field -> value map for this key
     */
    public <T> Map<String, T> hMGetTargetKey(TypeReference<T> typeReference, String key) {
        Map<String, T> resultMap = new HashMap<>();
        if (StringUtils.isNoneBlank(key)) {
            try {
                Cursor<Map.Entry<Object, Object>> cursor = redisTemplate.opsForHash()
                        .scan(key, ScanOptions.scanOptions().count(SCAN_COUNT).build());
                while (cursor.hasNext()) {
                    Map.Entry<Object, Object> next = cursor.next();
                    Object field = next.getKey();
                    Object value = next.getValue();
                    if (field != null && value != null) {
                        resultMap.put(field.toString(), objectMapper.convertValue(value, typeReference));
                    }
                }
                cursor.close();
            } catch (Exception e) {
                logger.error("Failed to scan redis hash " + key, e);
            }
        }
        return resultMap;
    }


    public Long hDel(int shardingCount, String key, String... field) {
        long count = 0;
        if (field != null) {
            for (Map.Entry<String, List<String>> entry : separateToMap(shardingCount, key, field).entrySet()) {
                count += redisTemplate.opsForHash().delete(entry.getKey(), entry.getValue().toArray());
            }
        }
        return count;
    }

    /**
     * Bucket the given fields by shard, producing a map of shard key ("<key>$<num>") to the fields it holds.
     */
    private Map<String, List<String>> separateToMap(int shardingCount, String key, String[] field) {
        Map<String, List<String>> tmpMap = new HashMap<>();
        for (String s : field) {
            if (StringUtils.isNotBlank(s)) {
                int num = Math.abs(s.hashCode() % shardingCount);
                String hashKey = String.format("%s$%s", key, num);
                tmpMap.computeIfAbsent(hashKey, k -> new ArrayList<>()).add(s);
            }
        }
        return tmpMap;
    }

    public void listSet(String key, List<?> list) {
        if (!CollectionUtils.isEmpty(list)) {
            redisTemplate.opsForList().rightPushAll(key, new ArrayList<>(list));
        }
    }

    public long listSize(String key) {
        return redisTemplate.opsForList().size(key);
    }

    public <T> Map<String, List<T>> listGet(String prefix, TypeReference<T> typeReference) {
        Map<String, List<T>> result = new ConcurrentHashMap<>();
        Set<String> setKeys = redisTemplate.keys(prefix + "*");
        setKeys.parallelStream().forEach(hashKey -> {
            try {
                List<T> list = listGet(typeReference, hashKey);
                result.put(hashKey, list);
            } catch (Exception e) {
                logger.error("Failed to read redis list for key " + hashKey, e);
            }
        });
        return result;
    }

    public <T> List<T> listGet(TypeReference<T> typeReference, String key) {
        List<T> resultList = new LinkedList<>();
        long size = redisTemplate.opsForList().size(key);
        // read the list in batches of 1000 elements
        for (long index = 0; index < size; index = index + 1000) {
            try {
                List<Object> objList = redisTemplate.opsForList().range(key, index, index + 999);
                objList.forEach(obj -> resultList.add(objectMapper.convertValue(obj, typeReference)));
            } catch (Exception e) {
                logger.error(ExceptionUtils.getStackTrace(e));
            }
        }
        return resultList;
    }

    public void listDeleteOneList(String key) {
        deleteKey(key);
    }


    public Long listDel(String key, long count, Object value) {
        return redisTemplate.opsForList().remove(key, count, value);
    }

    public boolean deleteKey(String key) {
        return redisTemplate.delete(key);
    }

    public Set<String> getKeys(String pattern) {
        return getKeys(pattern, 10, TimeUnit.SECONDS);
    }

    public Set<String> getKeys(String pattern, int timeout, TimeUnit timeUnit) {
        Set<String> keys = new HashSet<>();
        // run KEYS on a separate thread so a huge keyspace cannot block the caller beyond the timeout
        CompletableFuture<Set<String>> getRedisKeysFuture = CompletableFuture.supplyAsync(() -> {
            try {
                return redisTemplate.keys(pattern);
            } catch (Exception e) {
                logger.error("Failed to fetch redis keys matching {}:", pattern, e);
                return null;
            }
        }).whenComplete((result, ex) -> {
            if (result != null) {
                keys.addAll(result);
            }
        });
        try {
            getRedisKeysFuture.get(timeout, timeUnit);
        } catch (Exception e) {
            logger.error("Timed out fetching redis keys matching {}:", pattern, e);
        }
        return keys;
    }

    public void strSet(String key, String value) {
        redisTemplate.opsForValue().set(key, value);
    }

    public void strSet(String key, String value, long timeout, TimeUnit timeUnit) {
        redisTemplate.opsForValue().set(key, value, timeout, timeUnit);
    }


    public String strGet(String key) {
        Object object = redisTemplate.opsForValue().get(key);
        if (object != null) {
            return object.toString();
        }
        return null;
    }

    public void expire(String key, long timeout, TimeUnit unit) {
        redisTemplate.expire(key, timeout, unit);
    }

    public void hLSetToOneGroup(String key, List<?> list) {
        if (!CollectionUtils.isEmpty(list)) {
            redisTemplate.opsForList().leftPushAll(key, new ArrayList<>(list));
        }
    }

    public <T> List<T> hLGetFromOneGroup(TypeReference<T> typeReference, String key) {
        List<T> result = new ArrayList<>();
        if (StringUtils.isNotBlank(key)) {
            long size = Optional.ofNullable(redisTemplate.opsForList().size(key)).orElse(0L);
            List<Object> objects = redisTemplate.opsForList().range(key, 0, size);
            if (!CollectionUtils.isEmpty(objects)) {
                for (Object o : objects) {
                    result.add(o != null ? objectMapper.convertValue(o, typeReference) : null);
                }
            }
        }
        return result;
    }

    public void setTypeAdd(String key, Object value) {
        redisTemplate.opsForSet().add(key, value);
    }

    public void setTypeAddAll(String key, Set<?> value) {
        redisTemplate.opsForSet().add(key, value.toArray(new Object[0]));
    }

    public Set<Object> setTypeGet(String key) {
        return redisTemplate.opsForSet().members(key);
    }


    public boolean setTypeContain(String key, String value) {
        return redisTemplate.opsForSet().isMember(key, value);
    }
}
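
Beyond the dictionary example, a few of the other helpers can be exercised as in the following sketch; the key names ("demo:greeting", "demo:user:1") and values are made up purely for illustration and are not part of the original article.

import com.fasterxml.jackson.core.type.TypeReference;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

@Component
public class ShardingRedisUtilDemo {

    @Resource
    private ShardingRedisUtil shardingRedisUtil;

    public void demo() {
        // plain string value with a 30-minute TTL (hypothetical key)
        shardingRedisUtil.strSet("demo:greeting", "hello", 30, TimeUnit.MINUTES);
        String greeting = shardingRedisUtil.strGet("demo:greeting");

        // a single (unsharded) hash: write one field, read it back with a type hint
        Map<String, Object> user = new HashMap<>();
        user.put("name", "Tom");
        shardingRedisUtil.hMSetToOneGroup("demo:user:1", user);
        Map<String, String> fields = shardingRedisUtil.hMGetfromOneGroup(
                new TypeReference<String>() {}, "demo:user:1", "name");

        // remove one dictionary type from the sharded hash written in section 1
        long removed = shardingRedisUtil.hDel(Dictconstants.DICT_RESULT, "SEX");
    }
}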

4. The Redis connection configuration class


import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;

@Configuration
@ConditionalOnProperty(name = "spring.redis.host")
public class RedisConfiguration {

    @Value("${spring.redis.host}")
    private String host;

    @Value("${spring.redis.port}")
    private int port;

    @Value("${spring.redis.password}")
    private String password;

    @Value("${spring.redis.timeout:3000}")
    private int timeout;

    @Value("${redis.jedisPoolConfig.maxActive:300}")
    private int maxActive;

    @Value("${redis.jedisPoolConfig.maxIdle:100}")
    private int maxIdle;

    @Value("${redis.jedisPoolConfig.minIdle:1}")
    private int minIdle;

    @Value("${redis.jedisPoolConfig.maxWait:5000}")
    private int maxWait;

    @Value("${spring.redis.database:0}")
    private int database;

    @Bean
    public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory redisConnectionFactory) {
        RedisTemplate<String, Object> redisTemplate = new RedisTemplate<>();
        redisTemplate.setConnectionFactory(redisConnectionFactory);
        Jackson2JsonRedisSerializer<Object> jackson2JsonRedisSerializer = new Jackson2JsonRedisSerializer<>(Object.class);
        ObjectMapper objectMapper = new ObjectMapper();
//        objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
//        objectMapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL);
        jackson2JsonRedisSerializer.setObjectMapper(objectMapper);
        StringRedisSerializer stringRedisSerializer = new StringRedisSerializer();
        // keys and hash keys as plain strings, values and hash values as JSON
        redisTemplate.setKeySerializer(stringRedisSerializer);
        redisTemplate.setHashKeySerializer(stringRedisSerializer);
        redisTemplate.setValueSerializer(jackson2JsonRedisSerializer);
        redisTemplate.setHashValueSerializer(jackson2JsonRedisSerializer);
        redisTemplate.afterPropertiesSet();
        return redisTemplate;
    }

    @Bean
    public JedisPool jedisPoolFactory() {
        JedisPoolConfig poolConfig = new JedisPoolConfig();
        poolConfig.setMaxTotal(maxActive);
        poolConfig.setMaxIdle(maxIdle);
        poolConfig.setMinIdle(minIdle);
        poolConfig.setMaxWaitMillis(maxWait);
        return new JedisPool(poolConfig, host, port, timeout, password, database);
    }
}
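
One detail worth noting: default typing is left disabled on the ObjectMapper (the two commented-out lines), so values come back from Redis as generic JSON structures such as LinkedHashMap rather than as their original classes. That is exactly why ShardingRedisUtil passes every value through objectMapper.convertValue(...) with a TypeReference. A minimal, standalone illustration of that conversion step (the DemoUser class is hypothetical):

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.LinkedHashMap;
import java.util.Map;

public class ConvertValueDemo {

    // hypothetical value type, standing in for TbSysDictInfo
    public static class DemoUser {
        public String name;
        public int age;
    }

    public static void main(String[] args) {
        ObjectMapper objectMapper = new ObjectMapper();

        // what the JSON serializer hands back when no type information is stored
        Map<String, Object> raw = new LinkedHashMap<>();
        raw.put("name", "Tom");
        raw.put("age", 18);

        // the same conversion step ShardingRedisUtil performs on every value
        DemoUser user = objectMapper.convertValue(raw, new TypeReference<DemoUser>() {});
        System.out.println(user.name + " / " + user.age);
    }
}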

5. Configuration file

spring:
  redis:
    host: localhost
    port: 6379
    #password:
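
RedisConfiguration above also reads spring.redis.password, spring.redis.timeout, spring.redis.database, and the redis.jedisPoolConfig.* pool settings; all except the password have defaults in their @Value annotations, so either set a password or give that @Value a default (e.g. ${spring.redis.password:}). A fuller sketch with the optional settings spelled out (values are the defaults from the code; the password is a placeholder, not a real credential):

spring:
  redis:
    host: localhost
    port: 6379
    password: change-me   # placeholder; required because the @Value has no default
    timeout: 3000
    database: 0

redis:
  jedisPoolConfig:
    maxActive: 300
    maxIdle: 100
    minIdle: 1
    maxWait: 5000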
