Parsing a local log file with a Python script and uploading it to TDengine

import taos
import re
import time


def init_database_re(p_config, p_hostname, p_user='root', p_password='taosdata'):
    '''
    @parameter
    p_config:      path of the TDengine client configuration directory
    p_hostname:    hostname of the TDengine server
    p_user:        login user for the TDengine server
    p_password:    password for that user

    @return
    conn:       connection handle used to connect to the TDengine database
    c1:         cursor handle used to operate on the TDengine database
    pattern:    compiled regular expression used to match each line of fast.log

    @function
    1. Create the suricata database on the remote TDengine server if it does not exist
    2. Create the fastlog table in the suricata database if it does not exist
    3. Pre-compile the regular expression object used to match each line of fast.log
    '''
    conn = taos.connect(host=p_hostname, user=p_user, password=p_password, config=p_config)
    c1 = conn.cursor()
    try:
        c1.execute('create database if not exists suricata')
        c1.execute('use suricata')
        c1.execute('create table if not exists fastlog(ts timestamp, type binary(100), priority int, protocol binary(15), srcip binary(20), srcport int, dstip binary(20), dstport int)')
    except Exception as err:
        conn.close()
        raise(err)
    pattern = re.compile(r'(.*) (\[\*\*\]) (\[.*\]) (.*) (\[\*\*\]) (\[)(.*): (.*)(\]) (\[)(.*): (.*)(\]) (\{)(.*)(\}) (.*):(\d+) -> (.*):(\d+)')
    return conn, c1, pattern
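
# Illustrative fast.log line (values are made up) and how the capture groups of the
# pattern above are consumed by upload() further below:
#   05/14/2021-10:20:30.123456  [**] [1:2100498:7] GPL ATTACK_RESPONSE id check returned root [**] [Classification: Potentially Bad Traffic] [Priority: 2] {TCP} 192.168.1.10:41238 -> 10.0.0.5:80
#   group(1)  -> timestamp                      group(8)  -> classification text (stored as type)
#   group(12) -> priority                       group(15) -> protocol
#   group(17)/group(18) -> source IP / port     group(19)/group(20) -> destination IP / port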

def insert(p_conn,p_c1,p_ts,p_type,p_priority,p_protocol,p_srcip,p_srcport,p_dstip,p_dstport):
    '''
    @param
    p_conn:     connection handle for the TDengine database
    p_c1:       cursor handle used to insert data into suricata.fastlog
    p_ts:       time at which the alert was generated
    p_type:     alert type
    p_priority: alert priority
    p_protocol: network protocol of the alerting packet
    p_srcip:    source IP of the alerting packet
    p_srcport:  source port of the alerting packet
    p_dstip:    destination IP of the alerting packet
    p_dstport:  destination port of the alerting packet

    @function:
    1. Build the insert SQL statement from the parameters
    2. Use the constructed SQL to insert one row into suricata.fastlog in TDengine
    '''
    sql_fixed = "insert into suricata.fastlog(ts, type, priority, protocol, srcip, srcport, dstip, dstport)"
    sql_value = "values("+p_ts+",'"+p_type+"',"+p_priority+",'"+p_protocol+"','"+p_srcip+"',"+p_srcport+",'"+p_dstip+"',"+p_dstport+")"
    try:
        p_c1.execute(sql_fixed+sql_value)
        print(sql_fixed+sql_value)
    except Exception as err:
        p_conn.close()
        raise(err)
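
# For the illustrative fast.log line shown earlier, the statement built here would look like
# (the millisecond timestamp depends on the local timezone; the value below assumes UTC):
#   insert into suricata.fastlog(ts, type, priority, protocol, srcip, srcport, dstip, dstport) values(1620987630123,'Potentially Bad Traffic',2,'TCP','192.168.1.10',41238,'10.0.0.5',80)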

def upload(p_filename, p_conn, p_c1, p_pattern):
    with open(p_filename) as f:
        for line in f:
            result = p_pattern.match(line)
            if result is None:          # skip blank or otherwise non-matching lines
                continue
            # fast.log timestamps look like 05/14/2021-10:20:30.123456;
            # split off the fractional part and keep millisecond precision for TDengine
            date_part, frac_part = result.group(1).strip().split('.')
            ts = int(time.mktime(time.strptime(date_part, '%m/%d/%Y-%H:%M:%S'))) * 1000 + int(frac_part[:3])
            insert(p_conn, p_c1, str(ts), result.group(8), result.group(12), result.group(15),
                   result.group(17), result.group(18), result.group(19), result.group(20))
    p_conn.close()
    

conn, c1, pattern = init_database_re(p_config="C:\\TDengine\\cfg", p_hostname="fmr-dev-centos7", p_user='root', p_password='taosdata')
upload(p_filename="./fast.log", p_conn=conn, p_c1=c1, p_pattern=pattern)
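
# Optional sanity check -- a minimal sketch assuming the upload above completed without errors
# (upload() closes its connection, so a fresh one is opened here with the same parameters):
check_conn = taos.connect(host="fmr-dev-centos7", user='root', password='taosdata', config="C:\\TDengine\\cfg")
check_c1 = check_conn.cursor()
check_c1.execute('select count(*) from suricata.fastlog')
print(check_c1.fetchall())   # e.g. [(1234,)] -- number of alert rows stored in TDengine
check_conn.close()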
