Using Flume Interceptors

Example requirements:

  1. Read the log data and deliver it to Kafka.
  2. The data must be split by type and written to two different Kafka topics.

The Flume agent conf is set up as follows:

source : TAILDIR, which records read positions and therefore supports resuming after a restart
channel : two KafkaChannels, one per target topic; since the KafkaChannel delivers events to Kafka directly, no sink is needed
sink : none
interceptors : i1 and i2
i1 : ETL interceptor that cleans the data and drops dirty records
i2 : type interceptor that adds a header describing the log type
selector : a multiplexing selector that uses that header to decide which Kafka channel (and thus topic) each event goes to

a1.channels=c1 c2
a1.sources=r1

# source
a1.sources.r1.type = TAILDIR
a1.sources.r1.positionFile = /opt/module/flume/test/taildir_position1.json
a1.sources.r1.filegroups = f1
a1.sources.r1.filegroups.f1 = /tmp/logs/app.+
a1.sources.r1.fileHeader = true

# channel
a1.channels.c1.type = org.apache.flume.channel.kafka.KafkaChannel
a1.channels.c1.kafka.bootstrap.servers = linux101:9092,linux102:9092,linux103:9092
a1.channels.c1.kafka.topic = topic_start
a1.channels.c1.kafka.consumer.group.id = flume-consumer
a1.channels.c1.parseAsFlumeEvent = false

a1.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
a1.channels.c2.kafka.bootstrap.servers = linux101:9092,linux102:9092,linux103:9092
a1.channels.c2.kafka.topic = topic_event
a1.channels.c2.kafka.consumer.group.id = flume-consumer
a1.channels.c2.parseAsFlumeEvent = false
# interceptors
a1.sources.r1.interceptors=i1 i2
a1.sources.r1.interceptors.i1.type=com.dxy.LogETLInterceptor$Builder
a1.sources.r1.interceptors.i2.type=com.dxy.LogTypeInterceptor$Builder
# selector
a1.sources.r1.selector.type = multiplexing
a1.sources.r1.selector.header = topic
a1.sources.r1.selector.mapping.topic_start = c1
a1.sources.r1.selector.mapping.topic_event = c2
# bind source to channels
a1.sources.r1.channels = c1 c2
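
Once the interceptor jar has been built (see the Maven setup and interceptor code below), copied into Flume's lib directory, and the agent started (e.g. bin/flume-ng agent -n a1 -c conf -f <path to the conf above>), you can confirm that events are routed to both topics. The following verification consumer is not part of the original post; it is a minimal sketch that assumes the kafka-clients library (2.x or later) is on the classpath:

package com.dxy;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

// Hypothetical verification consumer: prints which topic each routed log line landed in.
public class TopicVerifier {

    public static void main(String[] args) {
        Properties props = new Properties();
        // Same brokers as in the Flume configuration above
        props.put("bootstrap.servers", "linux101:9092,linux102:9092,linux103:9092");
        props.put("group.id", "verify-group");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", "earliest");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Arrays.asList("topic_start", "topic_event"));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.topic() + " -> " + record.value());
                }
            }
        }
    }
}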

Maven dependencies (the assembly plugin packages the interceptors as a jar-with-dependencies; copy that jar into Flume's lib directory so the interceptor classes are on the agent's classpath):


<dependencies>
    <dependency>
        <groupId>org.apache.flume</groupId>
        <artifactId>flume-ng-core</artifactId>
        <version>1.7.0</version>
    </dependency>
</dependencies>

<build>
    <plugins>
        <plugin>
            <artifactId>maven-compiler-plugin</artifactId>
            <version>3.8.0</version>
            <configuration>
                <source>1.8</source>
                <target>1.8</target>
            </configuration>
        </plugin>
        <plugin>
            <artifactId>maven-assembly-plugin</artifactId>
            <configuration>
                <descriptorRefs>
                    <descriptorRef>jar-with-dependencies</descriptorRef>
                </descriptorRefs>
            </configuration>
            <executions>
                <execution>
                    <id>make-assembly</id>
                    <phase>package</phase>
                    <goals>
                        <goal>single</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>

ETL interceptor (cleans out dirty data):

package com.dxy;

import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.interceptor.Interceptor;

import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;

// ETL interceptor: filters out dirty/malformed log lines
public class LogETLInterceptor implements Interceptor {


    @Override
    public void initialize() {

    }

    @Override // single-event processing
    public Event intercept(Event event) {

        byte[] bytes = event.getBody();
        String str = new String(bytes, Charset.forName("UTF-8"));

        // Start logs and event logs follow different validation rules, so distinguish them first
        if(str.contains("start")){

            // Start log. Note: LogUtils is a helper class that holds the dirty-data validation rules
            if(LogUtils.validateStartLog(str)) {
                    return event;
            }

        }else{

            // Event log
            if(LogUtils.validateEventLog(str)) {
                return event;
            }
        }

        return null;
    }

    @Override // batch processing: keep only the events that pass the single-event check
    public List<Event> intercept(List<Event> list) {

        ArrayList<Event> arrayList = new ArrayList<>();
        for (Event event : list) {
            Event intercept = intercept(event);
            if(intercept!=null){
                arrayList.add(intercept);
            }
        }

         return arrayList;
    }
    // A Flume interceptor must provide a static Builder class that Flume uses to instantiate it
    public  static  class  Builder implements   Interceptor.Builder{

        @Override
        public Interceptor build() {
            return new LogETLInterceptor();
        }

        @Override
        public void configure(Context context) {

        }
    }


    @Override
    public void close() {

    }
}
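
The LogUtils class referenced by the ETL interceptor is not included in the post. A minimal sketch of what it might look like, assuming the common convention that a start log is a single JSON object and an event log has the form "millisecond-timestamp|json" (these rules are assumptions and must be adapted to the real log format):

package com.dxy;

// Hypothetical helper: the validation rules below are assumptions, adjust them to the real log format.
public class LogUtils {

    // A start log is assumed to be one JSON object: not blank and wrapped in { }.
    public static boolean validateStartLog(String log) {
        if (log == null || log.trim().isEmpty()) {
            return false;
        }
        String trimmed = log.trim();
        return trimmed.startsWith("{") && trimmed.endsWith("}");
    }

    // An event log is assumed to look like "1623593123456|{...}":
    // a 13-digit millisecond timestamp, a '|' separator, then a JSON object.
    public static boolean validateEventLog(String log) {
        if (log == null) {
            return false;
        }
        String[] parts = log.split("\\|");
        if (parts.length != 2) {
            return false;
        }
        if (!parts[0].matches("\\d{13}")) {
            return false;
        }
        String json = parts[1].trim();
        return json.startsWith("{") && json.endsWith("}");
    }
}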


Type-classifying interceptor (adds the routing header):

package com.dxy;

import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.interceptor.Interceptor;

import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

// Type interceptor: adds a "topic" header so the multiplexing selector can route each event
public class LogTypeInterceptor implements Interceptor {
    @Override
    public void initialize() {

    }

    @Override
    public Event intercept(Event event) {

        byte[] body = event.getBody();

        String s = new String(body, Charset.forName("UTF-8"));

        Map<String, String> header = event.getHeaders();
        // If the log line contains the string "start", set the header topic=topic_start; otherwise topic=topic_event
        if(s.contains("start")){

            header.put("topic","topic_start");
        }else{
            header.put("topic","topic_event");

        }
        return event;
    }

    @Override // batch processing: apply the single-event logic to every event in the batch
    public List<Event> intercept(List<Event> list) {
        ArrayList<Event> arrayList = new ArrayList<>();

        for (Event event : list) {

            Event intercept = intercept(event);
            arrayList.add(intercept);
        }
        return arrayList;
    }

    // Static Builder class required by Flume
    public static class Builder implements Interceptor.Builder{

        @Override
        public Interceptor build() {
            return new LogTypeInterceptor();
        }

        @Override
        public void configure(Context context) {

        }
    }

    @Override
    public void close() {

    }
}
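
A quick way to sanity-check the type interceptor locally, using Flume's EventBuilder. This test class is hypothetical (not in the original post) and the sample log lines are made up:

package com.dxy;

import org.apache.flume.Event;
import org.apache.flume.event.EventBuilder;

import java.nio.charset.StandardCharsets;

// Hypothetical smoke test: builds two sample events and checks the headers set by LogTypeInterceptor.
public class InterceptorSmokeTest {

    public static void main(String[] args) {
        LogTypeInterceptor typeInterceptor = new LogTypeInterceptor();

        // Sample log lines (made up for illustration only)
        Event startEvent = EventBuilder.withBody("{\"action\":\"start\"}", StandardCharsets.UTF_8);
        Event clickEvent = EventBuilder.withBody("1623593123456|{\"action\":\"click\"}", StandardCharsets.UTF_8);

        typeInterceptor.intercept(startEvent);
        typeInterceptor.intercept(clickEvent);

        // Expected output: topic_start and topic_event
        System.out.println(startEvent.getHeaders().get("topic"));
        System.out.println(clickEvent.getHeaders().get("topic"));
    }
}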
