#!/bin/bash
# Scan an HDFS cluster for DataNodes whose "DFS Used%" exceeds 97% and POST
# one alert record per offending host to a monitoring endpoint.
#
# NOTE(review): this script was recovered from a mangled single-line paste.
# The scrape stripped the awk field selectors ($1, $2, ...) and replaced
# ASCII quotes with smart quotes; the selectors below are best-effort
# reconstructions from the known layout of `hdfs dfsadmin -report` output —
# confirm against a live report before relying on them.

export JAVA_HOME=/app/jdk/jdk1.8.0_92
export HADOOP_CONF_DIR=/home/hdfs/balancer/hadoop-conf

# Start from a clean slate: drop result files from any previous run.
# (key.txt added — the original let it grow across runs, inflating keynum.)
rm -f hostname.txt dfused.txt hostdfs.txt hostdfs_alert.txt key.txt

# Collect "host : used%" pairs for live DataNodes and keep those above 97%.
function checkdfs() {
  echo "start checking hdfs used rate"

  # DataNode address lines look like "Name: 10.0.0.1:50010 (host)".
  # TODO(review): confirm field positions against real report output.
  hdfs dfsadmin -report | grep -i "50010" \
    | awk -F ":" '{print $2}' | awk '{print $1}' > hostname.txt

  # "DFS Used%: 12.34%" -> "12.34"
  hdfs dfsadmin -report | grep "DFS Used%" \
    | awk -F ": " '{print $2}' | awk -F "%" '{print $1}' > dfused.txt

  # "Live datanodes (N):" -> N
  livesum=$(hdfs dfsadmin -report | grep "live datanodes" \
    | awk -F "(" '{print $2}' | awk -F "):" '{print $1}')
  echo "$livesum"

  # The first "DFS Used%" line is the cluster summary, not a DataNode.
  sed -i '1d' dfused.txt

  # Keep only the first $livesum data lines (drop dead-node entries).
  livesum=$((livesum + 1))
  sed -i "${livesum},\$d" dfused.txt
  sed -i "${livesum},\$d" hostname.txt

  linesum=$(wc -l < hostname.txt)
  echo "$linesum"

  harr=($(awk '{print $1}' hostname.txt))
  darr=($(awk '{print $1}' dfused.txt))

  if [ "$linesum" -gt 0 ]; then
    # Pair host i with usage i. The original used an O(n^2) double loop
    # that only acted when i == j — a single index loop is equivalent.
    for ((i = 0; i < ${#harr[@]}; i++)); do
      echo "${harr[$i]}" ":" "${darr[$i]}" >> hostdfs.txt
    done
  else
    echo "Not live Datanodes"
  fi

  # Distinct usage values above the 97% threshold, descending.
  # hostdfs.txt lines are "host : value", so $3 is the percentage.
  ddarr=($(awk '$3 > 97 {print $3}' hostdfs.txt | sort -ru))
  for ((m = 0; m < ${#ddarr[@]}; m++)); do
    if grep -q -- "${ddarr[$m]}" hostdfs.txt; then
      # First host carrying this value -> "host : value" alert line.
      echo "$(grep -- "${ddarr[$m]}" hostdfs.txt | awk 'NR==1{print $1}')" ":" "${ddarr[$m]}" >> hostdfs_alert.txt
    fi
  done
}

checkdfs

url="http://xxx/monitor/report/sendReport"
linenum=$(wc -l < hostdfs_alert.txt)
linenum=$((linenum + 1))
echo "$linenum"

# POST each over-threshold host as {"key": ..., "value": ...} to $url,
# recording every generated metric key in key.txt for the rule script.
function postdfs() {
  for ((k = 1; k < linenum; k++)); do
    # Build the metric key "bdp.ops.<host>##cluster=bdp##env=prod",
    # then strip all blanks so the key is a single token.
    # TODO(review): the original key template was mangled — confirm format.
    key=$(sed -n "${k}p" hostdfs_alert.txt \
      | awk -F ":" '{print "bdp.ops." $1 "##cluster=bdp##env=prod"}' \
      | awk '{gsub(/[[:blank:]]*/, ""); print}')
    echo "$key" >> key.txt

    value=$(sed -n "${k}p" hostdfs_alert.txt | awk -F ": " '{print $2}')
    data="{\"key\":\"$key\",\"value\":\"$value\"}"
    echo "$data"

    # Call curl directly instead of building a string and running it via
    # `sh -c` (the indirection re-parsed the JSON payload — an injection
    # hazard). Header fixed to a valid "Content-Type: application/json".
    curl -s -X POST -H 'Content-Type: application/json' -d "${data}" "${url}"
  done
}

postdfs
# Register an alert rule for every metric key produced in key.txt.
#
# NOTE(review): recovered from a mangled single-line paste — smart quotes
# restored to ASCII, newlines reinstated, and the scrape-residue word fused
# onto the final `dfsalert` call removed (it made the invocation fail).
# The awk field selectors were stripped by the scrape and are best-effort
# reconstructions — verify against the actual key format in key.txt.

data_Expression="x>97"
data_time=5
data_count=1
data_alertMode="1,2,3"
data_members="01384016"
add_url="http://xxx/monitor/rule/addReport"

keynum=$(wc -l < key.txt)
keynum=$((keynum + 1))
echo "$keynum"

function dfsalert() {
  for ((n = 1; n < keynum; n++)); do
    data_key=$(sed -n "${n}p" key.txt | awk '{print $1}')

    # Extract the host portion of "bdp.ops.<host>...".
    # TODO(review): confirm which dot-separated fields hold the host.
    str1=$(sed -n "${n}p" key.txt | awk -F "." '{print $3 "." $4 "." $5}')
    str2="以下主机IP HDFS使用已经超过97%:"
    data_description=${str2}${str1}

    add_data="{\"ID\":\"$data_key\",\"Expression\":\"$data_Expression\",\"time\":$data_time,\"count\":$data_count,\"alertMode\":\"$data_alertMode\",\"members\":\"$data_members\",\"description\":\"$data_description\"}"
    echo "$add_data"

    # Call curl directly: the original `sh -c "$add_cmd"` re-parsed the
    # payload (injection hazard) and sent an invalid Content-type header.
    curl -s -X POST -H 'Content-Type: application/json' -d "${add_data}" "${add_url}"
  done
}

dfsalert
以上是内存溢出为你收集整理的Linux记录-告警脚本全部内容,希望文章能够帮你解决Linux记录-告警脚本所遇到的程序开发问题。
如果觉得内存溢出网站内容还不错,欢迎将内存溢出网站推荐给程序员好友。
欢迎分享,转载请注明来源:内存溢出
评论列表(0条)