## Centralized Log Rotation Management

- hosts: webs
  become: yes
  vars:
    app: tomcat_log_split
  tasks:
    - name: "{{ app }}"
      script: /opt/tags/ansible/playbooks/log_scripts/{{ app }}.sh {{ ENV }} {{ TIME }}

- hosts: services
  become: yes
  vars:
    app: service_log_split
  tasks:
    - name: "{{ app }}"
      script: /opt/tags/ansible/playbooks/log_scripts/{{ app }}.sh {{ ENV }} {{ TIME }}

- hosts: mongodb
  become: yes
  vars:
    app: mongo_log_split
  tasks:
    - name: "{{ app }}"
      script: /opt/tags/ansible/playbooks/log_scripts/{{ app }}.sh {{ ENV }} {{ TIME }}

- hosts: openresty
  become: yes
  vars:
    app: openresty_log_split
  tasks:
    - name: "{{ app }}"
      script: /opt/tags/ansible/playbooks/log_scripts/{{ app }}.sh {{ ENV }} {{ TIME }}

- hosts: nginx
  become: yes
  vars:
    app: nginx_log_split
  tasks:
    - name: "{{ app }}"
      script: /opt/tags/ansible/playbooks/log_scripts/{{ app }}.sh {{ ENV }} {{ TIME }}
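
The playbook expects ENV and TIME as extra variables at run time. A minimal invocation sketch — the inventory path and the playbook name log_split.yml are assumptions, not from the original:

# Hypothetical run; adjust the inventory, playbook name and variable values.
ansible-playbook -i /opt/tags/ansible/hosts log_split.yml --extra-vars "ENV=dev TIME=`date +%Y%m%d%H%M`"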

Tomcat logs:
#!/bin/bash
set -x
# Tomcat log rotation script
# Check the argument count
if [ $# != 2 ]; then
    echo "USAGE: $0 ENV TIME"
    exit 1
fi
ENV=$1
TIME=$2
IP=`/sbin/ifconfig eth0 | grep "inet addr" | awk '{print $2}' | awk -F ':' '{print $2}'`
ACCESS_LOG_TIME=`date +%Y-%m-%d`
S3_DIR="s3://**-logs/${ENV}/web"
# Directory holding the log files (the glob covers every tomcat instance)
LOCAL_DIR="/opt/tomcatwebsvr*/logs"
# Instance directories
ITEM_DIR="/opt/tomcatwebsvr*"
# Suffix for compressed files
BAK_SUFFIX=".gz"
# Create the backup directory if it does not exist yet
for logs_bak_dir in `ls -d ${LOCAL_DIR}`;
do
    if [ -d ${logs_bak_dir}/logs_bak ]; then
        echo "dir exist!"
    else
        mkdir -p ${logs_bak_dir}/logs_bak
    fi
done
# Only handle files ending in log/out/txt; skip try.log, reg.log and login.log
for logfile in `find ${LOCAL_DIR} | egrep -v "try.log$|reg.log$|login.log$" | egrep "[a-z]*.log$|[a-z]*.out$|${ACCESS_LOG_TIME}.txt$"`;
do
    # Force-copy, preserving attributes, then truncate the live log in place
    /bin/cp -af ${logfile} ${logfile}.${TIME}
    echo -n "" > ${logfile}
    # Compress the copy and move it into the backup directory
    gzip -c ${logfile}.${TIME} > ${logfile}+${TIME}+${BAK_SUFFIX}
    mv ${logfile}+${TIME}+${BAK_SUFFIX} ${logfile%/*}/logs_bak/
done
# Upload to S3, then delete the S3 copies from 90 days ago (both date formats)
for project in `ls -d ${ITEM_DIR} | awk -F '/' '{print $NF}'`;
do
    aws s3 sync /opt/${project}/logs/logs_bak/ ${S3_DIR}/${project}/${IP} --exclude "*" --include "*.log+${TIME}+${BAK_SUFFIX}"
    aws s3 rm ${S3_DIR}/${project}/${IP} --recursive --exclude "*" --include "*`date -d "-90 days" +%Y%m%d`*${BAK_SUFFIX}"
    aws s3 rm ${S3_DIR}/${project}/${IP} --recursive --exclude "*" --include "*`date -d "-90 days" +%Y-%m-%d`*${BAK_SUFFIX}"
done
# Delete the local copies stamped 7 days ago
find ${LOCAL_DIR}/ -name "*`date -d "-7 days" +%Y%m%d`*" | xargs rm -rf

Nginx logs:
#!/bin/bash
set -x
# Check the argument count
if [ $# != 2 ]; then
    echo "USAGE: $0 ENV TIME"
    exit 1
fi
ENV=$1
TIME=$2
IP=`/sbin/ifconfig eth0 | grep "inet addr" | awk '{print $2}' | awk -F ':' '{print $2}'`
S3_DIR="s3://**-logs/${ENV}/nginx"
# Directory holding the log files
LOG_FILE="/etc/nginx/logs"
# Suffix for compressed files
BAK_SUFFIX=".gz"
# Create the backup directory if it does not exist yet
for logs_bak_dir in `ls -d ${LOG_FILE}`;
do
    if [ -d ${logs_bak_dir}/logs_bak ]; then
        echo "dir exist!"
    else
        mkdir -p ${logs_bak_dir}/logs_bak
    fi
done
# Main backup loop
for logfile in `find ${LOG_FILE} | grep "log$"`;
do
    # Force-copy, preserving attributes, then truncate the live log in place
    /bin/cp -af ${logfile} ${logfile}.${TIME}
    echo -n "" > ${logfile}
    # Compress the copy and move it into the backup directory
    gzip -c ${logfile}.${TIME} > ${logfile}+${TIME}+${BAK_SUFFIX}
    mv ${logfile}+${TIME}+${BAK_SUFFIX} ${logfile%/*}/logs_bak/
done
# Reload so nginx reopens the truncated log files
/etc/init.d/nginx reload
# Upload to S3, then delete the S3 copies from 90 days ago
aws s3 sync ${LOG_FILE}/logs_bak/ ${S3_DIR}/${IP} --exclude "*" --include "*${TIME}+${BAK_SUFFIX}"
aws s3 rm ${S3_DIR}/${IP} --recursive --exclude "*" --include "*`date -d "-90 days" +%Y%m%d`*${BAK_SUFFIX}"
# Delete local backups older than 30 days, plus the raw copies stamped 3 days ago
find ${LOG_FILE}/ -name "*${BAK_SUFFIX}" -mtime +30 -delete
find ${LOG_FILE}/ -name "*`date -d "-3 days" +%Y%m%d`*" | xargs rm -rf
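
The reload is what makes the truncation safe: nginx keeps writing at its old file offset after the copy-and-truncate, so it has to be told to reopen its logs. If a full reload is undesirable, nginx's documented USR1 signal reopens log files only; a sketch, assuming the default pid-file location:

# USR1 asks the nginx master process to reopen its log files, nothing else.
# The pid-file path is an assumption; check the pid directive in nginx.conf.
kill -USR1 "$(cat /var/run/nginx.pid)"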

Service logs:
#!/bin/bash
set -x
# Check the argument count
if [ $# != 2 ]; then
    echo "USAGE: $0 ENV TIME"
    exit 1
fi
ENV=$1
TIME=$2
IP=`/sbin/ifconfig eth0 | grep "inet addr" | awk '{print $2}' | awk -F ':' '{print $2}'`
TYPE="service"
S3_DIR="s3://**-logs/${ENV}/service"
# Directories holding the log files
LOCAL_DIR="/opt/platform/services/*/*/logs"
# Log file filter
LOG_FILE="/opt/platform/services/*/*/logs/*.log"
# Project directories, one per service type
TYPE_DIR="/opt/platform/services/*/*"
# Suffix for compressed files
BAK_SUFFIX=".gz"
# Create the backup directory if it does not exist yet
for logs_bak_dir in `ls -d ${LOCAL_DIR}`;
do
    if [ -d ${logs_bak_dir}/logs_bak ]; then
        echo "dir exist!"
    else
        mkdir -p ${logs_bak_dir}/logs_bak
    fi
done
# Main backup loop
for logfile in `find ${LOG_FILE} | grep "log$"`;
do
    # Force-copy, preserving attributes, then truncate the live log in place
    /bin/cp -af ${logfile} ${logfile}.${TIME}
    echo -n "" > ${logfile}
    # Compress the copy and move it into the backup directory
    gzip -c ${logfile}.${TIME} > ${logfile}+${TIME}+${BAK_SUFFIX}
    mv ${logfile}+${TIME}+${BAK_SUFFIX} ${logfile%/*}/logs_bak/
done
# Sync to S3, then delete the S3 copies from 90 days ago
for project in `ls -d ${TYPE_DIR} | awk -F '/' '{print $NF}'`;
do
    aws s3 sync /opt/platform/services/*/${project}/logs/logs_bak/ ${S3_DIR}/${project}/${IP} --exclude "*" --include "*${TIME}+${BAK_SUFFIX}"
    aws s3 rm ${S3_DIR}/${project}/${IP} --recursive --exclude "*" --include "*`date -d "-90 days" +%Y%m%d`*${BAK_SUFFIX}"
done
# Delete the local copies stamped 3 days ago
find ${LOCAL_DIR}/ -name "*`date -d "-3 days" +%Y%m%d`*" | xargs rm -rf

Mongo logs:
#!/bin/bash
set -x
# Check the argument count
if [ $# != 2 ]; then
    echo "USAGE: $0 ENV TIME"
    exit 1
fi
ENV=$1
TIME=$2
IP=`/sbin/ifconfig eth0 | grep "inet addr" | awk '{print $2}' | awk -F ':' '{print $2}'`
NAME="mongo"
S3_DIR="s3://**-logs/${ENV}/other"
# Log file filter
LOG_FILE="/var/log/*/mongod.log"
# Directory holding the log files
LOCAL_DIR="/var/log/mongo"
# Suffix for compressed files
BAK_SUFFIX=".gz"
# Create the backup directory if it does not exist yet
for logs_bak_dir in `ls -d ${LOCAL_DIR}`;
do
    if [ -d ${logs_bak_dir}/logs_bak ]; then
        echo "dir exist!"
    else
        mkdir -p ${logs_bak_dir}/logs_bak
    fi
done
# Main backup loop
for LOG in `find ${LOG_FILE} | grep "log$"`; do
    # Force-copy, preserving attributes, then truncate the live log in place
    /bin/cp -af ${LOG} ${LOG}.${TIME}
    echo -n "" > ${LOG}
    # Compress the copy and move it into the backup directory
    gzip -c ${LOG}.${TIME} > ${LOG}+${TIME}+${BAK_SUFFIX}
    mv ${LOG}+${TIME}+${BAK_SUFFIX} ${LOG%/*}/logs_bak/
done
# Upload to S3, then delete the S3 copies from 90 days ago
aws s3 sync ${LOCAL_DIR}/logs_bak/ ${S3_DIR}/${NAME}/${IP} --exclude "*" --include "*${TIME}+${BAK_SUFFIX}"
aws s3 rm ${S3_DIR}/${NAME}/${IP} --recursive --exclude "*" --include "*`date -d "-90 days" +%Y%m%d`*${BAK_SUFFIX}"
# Delete local backups older than 7 days
find ${LOCAL_DIR} -name "mongo*${BAK_SUFFIX}" -mtime +7 | xargs rm -rf

Openresty logs:
#!/bin/bash
set -x
# Check the argument count
if [ $# != 2 ]; then
    echo "USAGE: $0 ENV TIME"
    exit 1
fi
ENV=$1
TIME=$2
IP=`/sbin/ifconfig eth0 | grep "inet addr" | awk '{print $2}' | awk -F ':' '{print $2}'`
S3_DIR="s3://**-logs/${ENV}/openresty"
# Directory holding the log files
LOG_FILE="/opt/openresty/log/nginx"
# Suffix for compressed files
BAK_SUFFIX=".gz"
# Create the backup directory if it does not exist yet
for logs_bak_dir in `ls -d ${LOG_FILE}`;
do
    if [ -d ${logs_bak_dir}/logs_bak ]; then
        echo "dir exist!"
    else
        mkdir -p ${logs_bak_dir}/logs_bak
    fi
done
# Main backup loop
for logfile in `find ${LOG_FILE} | grep "log$"`;
do
    # Force-copy, preserving attributes, then truncate the live log in place
    /bin/cp -af ${logfile} ${logfile}.${TIME}
    echo -n "" > ${logfile}
    # Compress the copy and move it into the backup directory
    gzip -c ${logfile}.${TIME} > ${logfile}+${TIME}+${BAK_SUFFIX}
    mv ${logfile}+${TIME}+${BAK_SUFFIX} ${logfile%/*}/logs_bak/
done
# Tell openresty to reopen its log files
docker exec openresty /usr/local/openresty/nginx/sbin/nginx -c /opt/openresty/nginx/nginx.conf -s reopen
# Upload to S3, then delete the S3 copies from 90 days ago
aws s3 sync ${LOG_FILE}/logs_bak/ ${S3_DIR}/${IP} --exclude "*" --include "*${TIME}+${BAK_SUFFIX}"
aws s3 rm ${S3_DIR}/${IP} --recursive --exclude "*" --include "*`date -d "-90 days" +%Y%m%d`*${BAK_SUFFIX}"
# Delete local backups older than 30 days, plus the raw copies stamped 3 days ago
find ${LOG_FILE}/ -name "*${BAK_SUFFIX}" -mtime +30 -delete
find ${LOG_FILE}/ -name "*`date -d "-3 days" +%Y%m%d`*" | xargs rm -rf
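
Because the reopen goes through docker exec, it fails quietly if the container name or config path drifts. A defensive variant might verify both first; a sketch reusing the names from the script above (treat them as assumptions):

# Only signal a running container, and only after the config parses.
if docker ps --format '{{.Names}}' | grep -qx openresty; then
    docker exec openresty /usr/local/openresty/nginx/sbin/nginx -c /opt/openresty/nginx/nginx.conf -t && \
    docker exec openresty /usr/local/openresty/nginx/sbin/nginx -c /opt/openresty/nginx/nginx.conf -s reopen
fi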

Crontab:
#59 23 * * * /bin/sh /opt/backup/nginx_log_split.sh >/dev/null 2>&1
#59 23 * * * /bin/sh /opt/backup/tomcat_log_split.sh >/dev/null 2>&1
#59 23 * * * /bin/sh /opt/backup/service_log_split.sh >/dev/null 2>&1
#59 23 * * * /bin/sh /opt/backup/mongo_log_split.sh >/tmp/mongo.log 2>&1
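
These entries run at 23:59 daily but pass neither ENV nor TIME, so they predate the two-argument check in the scripts above; they appear to be kept commented out in favor of the Ansible playbook. To append such an entry on a host without opening an editor, one option (the entry itself is copied from above):

# Append to the current user's crontab non-interactively.
(crontab -l 2>/dev/null; echo '59 23 * * * /bin/sh /opt/backup/nginx_log_split.sh >/dev/null 2>&1') | crontab -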