Use a shell script to collect per-user home space usage from the storage, write it to a CSV file, load it into a MySQL database, and finally visualize it in Grafana.

vi aa.sh

#!/bin/bash

# Timestamp written with every record, and the date string used to pick out today's rows
Date=$(date +"%Y-%m-%d %H:%M:%S")
Date2=$(date +"%Y-%m-%d")

Dir=/logs/Homes

/bin/rm -rf $Dir/quota2.txt
/bin/touch $Dir/quota2.txt

# Pull the quota report from both filers and keep only the home volumes
/usr/bin/ssh 10.0.0.10 "quota report -x" | grep home > $Dir/quota.txt
/usr/bin/ssh 10.0.0.20 "quota report -x" | grep home2 >> $Dir/quota.txt

# Extract the user names, strip the over-quota "*" marker, and drop root
awk -F'[: ]+' '{print $2}' $Dir/quota.txt | tr -d "*" | grep -v -w "root" > $Dir/users.txt

for i in $(cat $Dir/users.txt)
do
    # Pull each user's home volume, used space and file count out of the quota report
    Used=$(grep -w "$i" $Dir/quota.txt | awk -F'[: ]+' '{print $5}')
    Home=$(grep -w "$i" $Dir/quota.txt | awk -F'[: ]+' '{print $3}')
    Filen=$(grep -w "$i" $Dir/quota.txt | awk -F'[: ]+' '{print $8}')
    /bin/echo "$Date,$i,$Home,$Used,$Filen" >> $Dir/quota2.txt
done

# Keep only today's records, squeeze stray blanks, and write the CSV that will be loaded
grep "$Date2" $Dir/quota2.txt > $Dir/quota3.txt

tr -s '[:blank:]' < $Dir/quota3.txt > $Dir/quota4.csv

/usr/bin/mysql -h 10.0.0.2 -u icinga2 -p'xxxx' --local-infile=1 ming -e "LOAD DATA LOCAL INFILE '/logs/Homes/quota4.csv' INTO TABLE userquota FIELDS TERMINATED BY ','"

:wq
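The Grafana panels below plot time series, so aa.sh has to run on a schedule. A minimal cron sketch, assuming the script sits at /root/aa.sh and an hourly interval is enough (both the path and the interval are assumptions):

crontab -e

# Hypothetical schedule: run the collector once an hour and keep a log
0 * * * * /bin/bash /root/aa.sh >> /logs/Homes/aa_cron.log 2>&1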

mysql -u root -p'xxxx'

create database ming default character set utf8;
create table userquota(time datetime,user char(50),home char(50),used float,filen float);
grant all on ming.* to aa@'localhost' identified by 'xxxx';
exit
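With the database and table in place, run aa.sh once by hand and check that the rows actually arrived; a quick sanity check along these lines (the LIMIT is arbitrary):

/usr/bin/mysql -h 10.0.0.2 -u icinga2 -p'xxxx' ming -e "select time,user,home,used,filen from userquota order by time desc limit 5;"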

Adding the MySQL data source in Grafana is not covered in detail here.
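For reference, the data source can also be provisioned from a file instead of the UI. A minimal sketch, assuming Grafana's default provisioning directory and that Grafana runs on the MySQL host itself, so it can use the aa@'localhost' account created above (both are assumptions, not part of the original setup):

cat > /etc/grafana/provisioning/datasources/ming-mysql.yaml <<'EOF'
apiVersion: 1
datasources:
  - name: ming-mysql
    type: mysql
    # Assumes Grafana runs on the MySQL host, matching the aa@'localhost' grant
    url: localhost:3306
    database: ming
    user: aa
    secureJsonData:
      password: xxxx
EOF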

Query the total number of users:

select UNIX_TIMESTAMP(time) as time_sec, count(*) as Total from userquota group by time_sec;

Query the number of users on each home volume:

select UNIX_TIMESTAMP(time) as time_sec, count(*) as home8 from userquota where home='home8' group by time_sec;

Query a single user's space usage:

SELECT UNIX_TIMESTAMP(time) as time_sec, used as 'zhi.yang' FROM userquota WHERE user='zhi.yang';
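Rather than hard-coding one query per person, the panel can use a Grafana dashboard variable; a sketch assuming a dashboard variable named user has been defined (the variable is not part of the original setup):

-- Query for the dashboard variable "user"
select distinct user from userquota;

-- Panel query; Grafana substitutes $user before sending the SQL to MySQL
SELECT UNIX_TIMESTAMP(time) as time_sec, used as '$user' FROM userquota WHERE user='$user';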

Query the average space usage:

select Unix_timestamp(time) as time_sec, avg(used) from userquota group by time_sec;

Count the number of users in each usage range:

select unix_timestamp(time) as time_sec, count(*) as '=0' from userquota where used = '0' group by time_sec;

select unix_timestamp(time) as time_sec, count(*) as '<=50G' from userquota where used > '0' and used <= '52144000' group by time_sec;

select unix_timestamp(time) as time_sec, count(*) as '50G-100G' from userquota where used > '52144000' and used <= '112144000' group by time_sec;
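If a top bucket is also wanted, the same pattern extends naturally (the label and threshold below mirror the queries above and are an assumption):

select unix_timestamp(time) as time_sec, count(*) as '>100G' from userquota where used > '112144000' group by time_sec;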