1.下载
lai@120-test:/data/autoadmin$ more autodownload
#!/bin/bash
# Download half of the sync pair: fetch each file listed in conf from the
# remote host 192.168.206.B and overwrite the local copy (inverse of autoupload).
# conf line format: <dir> <file1> [<file2> ...]
logfile=/data/autoadmin/updown.log
echo "--------------------------------------------------------------->>>>>" >> "$logfile"
echo "$(date +%Y'年'%m'月'%d'日'%H':'%M':'%S) 此次要下载并替换的文件是:" >> "$logfile"
cat conf >> "$logfile"
echo "下载替换过程:" >> "$logfile"
## For every file name in columns 2..NF, scp it from the remote <dir> ($1)
## into the same local <dir> — the reverse of the upload sync.
awk '{for(i=2;i<=NF;i++) system("scp [email protected]:"$1"/"$i" " $1)}' conf
2.上传同步
lai@120-test:/data/autoadmin$ more autoupload
#!/bin/bash
# Upload half of the sync pair: push each file listed in conf to the
# remote host 192.168.206.B (inverse of autodownload).
# conf line format: <dir> <file1> [<file2> ...]
logfile=/data/autoadmin/updown.log
echo "-------------------------------------------------------------<<<<<" >> "$logfile"
echo "$(date +%Y'年'%m'月'%d'日'%H':'%M':'%S) 此次要上传同步的文件是:" >> "$logfile"
cat conf >> "$logfile"
echo "上传同步过程:" >> "$logfile"
## For every file name in columns 2..NF, scp <dir>/<file> to the same <dir>
## on the remote host.
awk '{for(i=2;i<=NF;i++) system("scp "$1"/"$i" [email protected]:"$1)}' conf
3.屏幕输出保存到文件-script命令
lai@120-test:/data/autoadmin$ more script.sh
#!/bin/bash
# Wrapper: run the sync scripts under script(1) so their screen output
# is appended (-a) to the shared log updown.log.
logfile=/data/autoadmin/updown.log
##执行同步上传及下载文件的脚本,并且把上传下载时的屏幕输出捕获到日志文件updown.log。
#script -a -c /data/autoadmin/autodownload $logfile
#script -a -c /data/autoadmin/autotmp $logfile
script -a -c /data/autoadmin/autoupload "$logfile"
echo " " >> "$logfile"
4.配置文件conf的格式
lai@120-test:/data/autoadmin$ more conf
/data/webroot/www 111.txt 111.txt
5. mac os 10.0.6下的自动分片ts文件的小脚本
Lai-Mac:shelltest laijingli$ more autosegmenter
#!/bin/bash
### Auto-segment the .ts files dropped in macftp: for each source file, create
### a per-title directory under ~/video and run mediastreamsegmenter twice —
### once for the video variant, once for the audio-only (aac) variant.
# NOTE(review): the original built its work list by parsing `ls -l` through
# awk/sed temp files, which breaks on names with spaces and (via BSD
# `sed -i -e`) left a stray filelist.txt-e backup. A glob needs neither.
# It also assumed every input file is <name>.ts — kept here; TODO confirm.
src_dir=/Users/laijingli/macftp
out_dir=/Users/laijingli/video
seglog="seglog_$(date +%Y%m%d).log"
for src in "$src_dir"/*.ts; do
  [ -e "$src" ] || continue            # guard: glob matched nothing
  name=$(basename "$src" .ts)
  ## per-title output dir, then segment the video variant
  mkdir "$out_dir/$name"
  mediastreamsegmenter -b "http://192.168.206.22:8080/aim/ts/$name" \
    -f "$out_dir/$name" -l "$seglog" -s 2000 \
    -i index_video.m3u8 -B "$name-" < "$src"
  ## 切分aac格式的音频 — aac (audio-only) variant; ship the master playlist
  ## template alongside the segments first
  cp /Users/laijingli/shelltest/index.m3u8 "$out_dir/$name"
  mediastreamsegmenter -b "http://192.168.206.22:8080/aim/ts/$name" \
    -f "$out_dir/$name" -l "$seglog" -s 2000 -a \
    -i index_aac.m3u8 -B "aac-$name-" < "$src"
done
## when segmenting finishes, upload the segmented files to the web server
# scp -r /Users/laijingli/video/* [email protected]:/data/iphonevideo/.
## after the upload completes, delete the segmented files and their dirs
# rm -fr /Users/laijingli/video/*
6. m3u8文件的格式
Lai-Mac:shelltest laijingli$ more index.m3u8
#EXTM3U
#EXT-X-STREAM-INF:PROGRAM-ID=1, BANDWIDTH=350000
index_video.m3u8
#EXT-X-STREAM-INF:PROGRAM-ID=1, BANDWIDTH=64000, CODECS="mp4a.40.5"
index_aac.m3u8
7.自动分析log的小脚本
root@120-test:/data/autoadmin/logfilter# more autoauth
#!/bin/bash
# Daily log analyzer: writes a dated report (<YYYYMMDD>_login) covering
# yesterday's SSH logins/failures, disk usage, kern.log and syslog entries.
# Intended to run once per day (e.g. from cron) on the host itself.
ip=$(ifconfig eth0 | awk -F: 'NR==2 {print $2}' | awk '{print $1}')
logtime=$(date +%Y%m%d --date='1 days ago')
oldtime=$(date +%Y%m%d --date='7 days ago')
# Date prefix used by syslog-style logs for yesterday, e.g. "May 27".
# %e (space-padded day) matches syslog's "May  7" for days 1-9; the
# zero-padded %d used previously ("May 07") never matched those lines.
dateformat=$(date +'%b %e' --date='1 days ago')
fix=_login
report=$logtime$fix
authlog=/var/log/auth.log
# Append one line to the report file.
out() { printf '%s\n' "$1" >> "$report"; }
# -f: do not error on the very first run, when no previous report exists
rm -f "$report"
# retain analysis reports for 7 days only
if [ -f "$oldtime$fix" ]; then
  rm -f "$oldtime$fix"
fi
### successful SSH logins yesterday ###
out ""
out "统计时间:$(date +%Y'年'%m'月'%d'日' --date='1 days ago')"
out "统计主机:$ip"
out ""
out "################### $ip statistics Begin ###################"
out ""
out "--------------------- SSH Login Begin ------------------------ "
out "SSH登录成功的详细统计:"
grep "$dateformat" "$authlog" | grep 'Accepted password' | awk '{print $9" "$11}' | sort | uniq -c >> "$report"
### failed SSH logins yesterday ###
out ""
out "SSH登录失败ip次数统计:"
# failed attempts per source IP, existing user names only
out "用户名正确:"
grep "$dateformat" "$authlog" | grep "Failed password" | grep -v "invalid" | awk '{print $11}' | sort | uniq -c >> "$report"
# failed attempts per source IP, non-existent ("invalid user") names
out "用户名不正确:"
grep "$dateformat" "$authlog" | grep "Failed password" | grep "invalid" | awk '{print $13}' | sort | uniq -c >> "$report"
out ""
out "SSH登录失败用户名次数统计:"
# failed attempts per user name, existing user names only
grep "$dateformat" "$authlog" | grep "Failed password" | grep -v "invalid" | awk '{print $9}' | sort | uniq -c >> "$report"
# failed attempts per user name, non-existent ("invalid user") names
grep "$dateformat" "$authlog" | grep "Failed password" | grep "invalid" | awk '{print $11}' | sort | uniq -c >> "$report"
out "--------------------- SSH Login End ------------------------ "
### disk space usage ###
out ""
out "--------------------- Disk Space Begin ------------------------ "
df -h >> "$report"
out "--------------------- Disk Space End -------------------------- "
### kern.log entries for yesterday ###
out ""
out "--------------------- Kern.log Begin ------------------------ "
grep "$dateformat" /var/log/kern.log >> "$report"
out "--------------------- Kern.log End -------------------------- "
### syslog entries for yesterday, CRON noise excluded ###
out ""
out "--------------------- Syslog Begin ------------------------ "
grep "$dateformat" /var/log/syslog | grep -v "CRON" >> "$report"
out "--------------------- Syslog End -------------------------- "
out ""
out "################### $ip statistics End #####################"