Linux Notes - Alert Script
#!/bin/bash
# Alert script: find DataNodes whose DFS Used% exceeds 95%, report their values
# to a monitoring endpoint, and register an alert rule for every live DataNode.

export JAVA_HOME=/app/jdk/jdk1.8.0_92
export HADOOP_CONF_DIR=/home/hdfs/balancer/hadoop-conf

rm -f hostname.txt dfused.txt hostdfs.txt hostdfs_alert.txt

### Collect the hostnames and DFS Used% of DataNodes above 95%
function checkdfs(){
    echo "start checking hdfs used rate"

    # DataNode addresses come from the "Name: <ip>:50010" lines of the report,
    # usage values from the per-node "DFS Used%" lines.
    hdfs dfsadmin -report | grep -i "50010" | awk -F ":" '{print $2}' | awk '{print $1}' > hostname.txt
    hdfs dfsadmin -report | grep "DFS Used%" | awk -F ": " '{print $2}' | awk -F "%" '{print $1}' > dfused.txt

    # Number of live DataNodes, taken from the "Live datanodes (N):" summary line
    livesum=$(hdfs dfsadmin -report | grep "Live datanodes" | awk '{print $3}' | awk -F "):" '{print $1}' | awk -F "(" '{print $2}')
    echo "$livesum"

    # Drop the cluster-wide "DFS Used%" summary line, then trim both files to
    # the live DataNodes only (dead nodes are listed after the live ones).
    sed -i '1d' dfused.txt
    let livesum+=1
    sed -i "${livesum},\$d" dfused.txt
    sed -i "${livesum},\$d" hostname.txt

    linesum=$(cat hostname.txt | wc -l)
    echo "$linesum"

    harr=($(awk '{print $1}' hostname.txt))
    darr=($(awk '{print $1}' dfused.txt))

    if [ "$linesum" -gt 0 ]; then
        # Pair each hostname with its usage value by index
        for (( i = 0; i < ${#harr[@]} && i < ${#darr[@]}; i++ )); do
            echo "${harr[$i]} : ${darr[$i]}" >> hostdfs.txt
        done
    else
        echo "No live DataNodes"
    fi

    # Keep only the nodes whose DFS Used% is above 95%
    awk '$3 > 95 {print $1":"$3}' hostdfs.txt > hostdfs_alert.txt
}

checkdfs

rm -f key.txt
url="http://xxx/monitor/report/sendReport"
linenum=$(cat hostdfs_alert.txt | wc -l)
let linenum+=1
echo "$linenum"

### Report the collected values
function postdfs(){
    for (( k = 1; k < linenum; k++ )); do
        # Build the metric key for this node, stripping any whitespace
        key=$(sed -n "${k}p" hostdfs_alert.txt | awk -F ":" '{print "xxx.hdfs_used##ip="$1"##cluster=bdp##env=prod"}' | tr -d '[:blank:]')
        echo "$key" >> key.txt
        value=$(sed -n "${k}p" hostdfs_alert.txt | awk -F ":" '{print $2}')
        data="{\"key\":\"$key\",\"value\":\"$value\"}"
        echo "$data"
        curl -s -X POST -H 'Content-Type: application/json' -d "$data" "$url"
    done
}

postdfs

rm -f allkey.txt
expression="x>95"
time=5
count=1
alertMode="1,2,3"
members="bdp-ops"
call_users="bdp-ops"
call_content="xxx"
add_url="http://xxx/monitor/rule/addReport"
hnum=$(cat hostname.txt | wc -l)
let hnum+=1
echo "$hnum"

### Register an alert rule for every live DataNode
function dfsalert(){
    for (( n = 1; n < hnum; n++ )); do
        key=$(sed -n "${n}p" hostname.txt | awk '{print "xxx.hdfs_used##ip="$1"##cluster=bdp##env=prod"}')
        str1=$(sed -n "${n}p" hostname.txt | awk '{print $1}')
        str2=": HDFS usage has exceeded 95%, please check the HDFS storage directories"
        echo "$key" >> allkey.txt
        description=${str1}${str2}
        add_data="{\"id\":\"$key\",\"expression\":\"$expression\",\"time\":$time,\"count\":$count,\"alertMode\":\"$alertMode\",\"members\":\"$members\",\"description\":\"$description\",\"call_users\":\"$call_users\",\"call_content\":\"$call_content\"}"
        echo "$add_data"
        curl -s -X POST -H 'Content-Type: application/json' -d "$add_data" "$add_url"
    done
}

dfsalert
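To run the check on a schedule, the script can be driven from cron. A minimal sketch, assuming the script is saved as check_hdfs_used.sh under /home/hdfs/balancer and should run every 10 minutes (the filename and interval are placeholders, not part of the original script); the cd is needed because the script writes its work files (hostname.txt, dfused.txt, hostdfs.txt, hostdfs_alert.txt, key.txt, allkey.txt) with relative paths:

# crontab entry: run the HDFS usage check every 10 minutes and keep a log
*/10 * * * * cd /home/hdfs/balancer && bash check_hdfs_used.sh >> check_hdfs_used.log 2>&1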