Blacklist Protection

If a user has been identified as abnormal, we need to take measures to limit the risk. The first thing that comes to mind is blocking that IP for a period of time. Static whitelists and blacklists, maintained as configured lists of IPs to filter, have been around for a long time; our scenario here is dynamic: if illegal requests from an IP are detected 5 times, the IP is blocked and frozen for 24 hours. If the user is logged in, we also record the user information, and if the same user is repeatedly caught making illegal requests, the account is frozen as well.
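Reduced to numbers, the policy is just a couple of thresholds. A tiny sketch to pin them down (the class and constant names are made up for illustration, not part of any implementation yet):

/** Hypothetical constants capturing the rule described above. */
public final class BlacklistPolicy {

    /** Number of detected illegal requests before an IP gets blocked. */
    public static final int MAX_ILLEGAL_REQUESTS = 5;

    /** Hours a blocked IP stays frozen. */
    public static final int IP_FREEZE_HOURS = 24;

    private BlacklistPolicy() {
    }
}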

Implementation Approach

How do we record the blacklist? Whatever the filter uses has to hold up under concurrency. A CopyOnWriteArrayList could store the blacklist, but it copies the whole underlying array on every write, so frequent concurrent writes to the blacklist are a problem. We considered switching to a ConcurrentMap instead, but our requirement also involves time-based expiration, so in the end we use Guava's CacheBuilder to build a concurrency-safe cache whose entries expire on their own.
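Before wiring it into a filter, here is a minimal, standalone sketch of what CacheBuilder provides: a thread-safe cache whose entries expire a fixed time after they are written, so blocked IPs drop off automatically. The names and the example IP below are only illustrative; the 24-hour window matches the freeze time described above.

import java.util.concurrent.TimeUnit;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

public class BlacklistCacheDemo {

    public static void main(String[] args) {
        // Entries are evicted 24 hours after put(); no manual cleanup thread is needed.
        Cache<String, Long> blacklist = CacheBuilder.newBuilder()
                .expireAfterWrite(24, TimeUnit.HOURS)
                .maximumSize(2000) // cap the number of tracked IPs
                .build();

        blacklist.put("192.168.0.1", System.currentTimeMillis());

        // getIfPresent returns null once the entry has expired (or was never added).
        Long blockedAt = blacklist.getIfPresent("192.168.0.1");
        System.out.println("blocked since: " + blockedAt);
    }
}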

Experiment

Let's run a small experiment. First, create a filter:

package com.example.filter;

import java.io.IOException;

import javax.servlet.*;
import javax.servlet.annotation.WebFilter;
import javax.servlet.http.HttpServletRequest;

import org.springframework.context.annotation.Configuration;

import lombok.extern.java.Log;

@Configuration
@WebFilter(filterName = "sample", urlPatterns = "/*")
@Log
public class SampleFilter implements Filter {

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        log.info("init");
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
        log.info("filter");
        try {
            // resolve and log the caller's IP (getIpAddr logs it)
            getIpAddr((HttpServletRequest) request);
        } catch (Exception e) {
            e.printStackTrace();
        }
        chain.doFilter(request, response);
    }

    @Override
    public void destroy() {
        log.info("destroy");
    }

    public static String getIpAddr(HttpServletRequest request) {
        String ip = request.getRemoteAddr();
        log.info("ip:" + ip);
        return ip;
    }
}
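One detail the demo glosses over is how the filter actually gets registered. Because of @Configuration, Spring Boot already picks the class up as a filter bean; if you want the @WebFilter attributes (filterName, urlPatterns) to be honoured instead, servlet component scanning has to be enabled on the application class. Use one mechanism or the other, since enabling both can register the filter twice. A minimal sketch of the scanning variant, assuming a standard Spring Boot setup (the application class name is made up):

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.ServletComponentScan;

@SpringBootApplication
@ServletComponentScan // picks up @WebFilter / @WebServlet / @WebListener classes
public class SampleApplication {

    public static void main(String[] args) {
        SpringApplication.run(SampleApplication.class, args);
    }
}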

So far the filter only logs the caller's IP. Next, we turn it into the global blacklist filter:

package com.example.filter;

import java.io.IOException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;

import javax.servlet.*;
import javax.servlet.annotation.WebFilter;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.springframework.context.annotation.Configuration;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

import lombok.extern.java.Log;

/**
 * Created by JUN on 2017/3/4.
 */
@Configuration
@WebFilter(filterName = "sample",urlPatterns = "/*")
@Log
public class SampleFilter implements Filter {
    /** Blacklist entry expiry time, in hours. */
    private static final int EXPIRE_TIME = 24;
    /** Maximum number of blacklist entries kept in the cache. */
    private static final long MAX_SIZE = 2000;
    /** Blacklist cache: blocked IP -> timestamp (ms) at which it was blocked. */
    private static final Cache<String, Long> blacklistCache;

    /** Test scenario: track every IP and count how often it accesses us. */
    private static final ConcurrentMap<String, Long[]> ipMap = new ConcurrentHashMap<>();

    static {
        // entries are evicted EXPIRE_TIME hours after they were written, so no manual cleanup is needed
        blacklistCache = CacheBuilder.newBuilder()
            .expireAfterWrite(EXPIRE_TIME, TimeUnit.HOURS)
            .maximumSize(MAX_SIZE)
            .build();
    }

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        log.info("init");
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
        HttpServletRequest req = (HttpServletRequest) request;
        // workaround - the browser hits this filter twice per page load because it also requests /favicon.ico
        if ("/favicon.ico".equals(req.getRequestURI())) {
            chain.doFilter(request, response);
            return;
        }
        // resolve the client ip
        String ip = request.getRemoteAddr();
        // look the ip up in the blacklist; getIfPresent returns null if it is not (or no longer) blocked
        Long blockedAt = blacklistCache.getIfPresent(ip);
        if (blockedAt != null) {
            // blocked ip: short-circuit the filter chain
            out((HttpServletResponse) response, "refuse!!!!");
            return;
        }
        /*
         * Test scenario:
         * if an ip makes more than 2 requests within 1 second, it is put on the blacklist.
         */
        if (ipMap.containsKey(ip)) {
            Long[] info = ipMap.get(ip);
            info[0] = info[0] + 1; // increment the request counter
            log.info("count:" + info[0]);
            if (info[0] > 2) {
                Long accessTime = info[1];
                Long currentTime = System.currentTimeMillis();
                log.info("interval:" + (currentTime - accessTime));
                if (currentTime - accessTime <= 1000) {
                    log.info(ip + " made [" + info[0] + "] requests within [" + (currentTime - accessTime) + "] ms");
                    // the cache expires entries by itself, so there is no explicit un-blocking code
                    blacklistCache.put(ip, currentTime);
                    ipMap.remove(ip);
                    out((HttpServletResponse) response, "refuse!!!!");
                    return;
                }
                // the threshold was not exceeded inside the window: reset the counter and the window start
                info[0] = 0L;
                info[1] = System.currentTimeMillis();
                ipMap.put(ip, info);
            }
        } else {
            log.info("start monitoring new ip: " + ip);
            Long[] info = new Long[2];
            info[0] = 0L; // request counter
            info[1] = System.currentTimeMillis(); // first-seen timestamp
            ipMap.put(ip, info);
        }


        chain.doFilter(request,response);
    }

    @Override
    public void destroy() {
        log.info("destroy");
    }

    public static void out(HttpServletResponse response, String result) {
        try {
            response.setContentType("application/json;charset=utf-8");
            response.setCharacterEncoding("utf-8");
            byte[] bytes = result.getBytes("utf-8");
            response.setContentLength(bytes.length);
            ServletOutputStream outputStream = response.getOutputStream();
            outputStream.write(bytes);
            outputStream.flush();
            outputStream.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
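To watch the blacklist kick in, fire a handful of requests in a tight loop and check when the body flips to "refuse!!!!". A rough smoke-test sketch, assuming the application is running locally on port 8080; the class name and the root URL are assumptions:

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class BlacklistSmokeTest {

    public static void main(String[] args) throws Exception {
        // Fire requests back to back; once the per-second threshold in the filter
        // is exceeded, the body becomes "refuse!!!!" and stays that way for 24 hours.
        for (int i = 1; i <= 5; i++) {
            HttpURLConnection conn =
                    (HttpURLConnection) new URL("http://localhost:8080/").openConnection();
            int status = conn.getResponseCode();
            InputStream body = status < 400 ? conn.getInputStream() : conn.getErrorStream();
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(body, StandardCharsets.UTF_8))) {
                System.out.println("request " + i + " -> HTTP " + status + ": " + in.readLine());
            }
            conn.disconnect();
        }
    }
}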
