The end result looks like the screenshot below:
1. First, create a WeChat Work (企业微信) group bot.
After the bot is created, you will be given its message push (webhook) URL, as shown below.
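The webhook URL has the form https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=<bot-key>. Before wiring it into a scheduled job, it is worth pushing a throwaway text message to confirm the key works. Below is a minimal sketch using Java 11's built-in HttpClient; the key value is a placeholder you would replace with your own.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class WebhookSmokeTest {
    public static void main(String[] args) throws Exception {
        // Placeholder: replace YOUR-BOT-KEY with the key from your own bot's webhook URL
        String webhook = "https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=YOUR-BOT-KEY";
        String payload = "{\"msgtype\":\"text\",\"text\":{\"content\":\"webhook smoke test\"}}";
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(webhook))
                .header("Content-Type", "application/json; charset=utf-8")
                .POST(HttpRequest.BodyPublishers.ofString(payload))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // A successful push returns {"errcode":0,"errmsg":"ok"}
        System.out.println(response.body());
    }
}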
2. Query Elasticsearch and alert on sites that have stopped producing data:
The scheduled task below runs every 6 hours, fetches the list of monitored sites, aggregates the last 6 hours of documents in ES by site id, and reports every site with zero documents to the WeChat Work bot.
package com.adgn.timer.timer;
import com.adgn.timer.config.Config;
import com.adgn.timer.domain.EarlyWarningVo;
import com.adgn.timer.utils.httputils.HttpUtils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import org.apache.commons.lang3.StringUtils;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
* @Author sannian
* @Date 2022/1/5 10:51
* @Version 1.0
*/
@Component
@EnableScheduling
public class EarlyWarningScheduledReady {
    public final static Logger logger = LoggerFactory.getLogger(EarlyWarningScheduledReady.class);

    @Autowired
    public RestHighLevelClient restHighLevelClient;
    @Scheduled(cron = "0 0 0/6 * * ?")
    @Async("threadPoolTaskExecutor")
    public void init() {
        try {
            logger.info("early-warning timer started");
            // Endpoint that returns the list of monitored crawler sites
            String url = "http://192.168.3.212:9999/v1/getInfoSource";
            String resp = HttpUtils.get(url);
            if (StringUtils.isBlank(resp)) {
                logger.error("[domain cache] resp is empty ! url -> {}, resp -> {}", url, resp);
                return;
            }
            JSONArray result = JSON.parseArray(resp);
            List<EarlyWarningVo> siteList = result.toJavaList(EarlyWarningVo.class);
            getData(siteList);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }
    private void getData(List<EarlyWarningVo> crawlerSiteIds) throws IOException {
        int size = crawlerSiteIds.size();
        if (size == 0) {
            return;
        }
        if (size >= 500) {
            // Query in batches of at most 500 site ids
            List<EarlyWarningVo> smallCrawlerSiteIds = new ArrayList<>();
            crawlerSiteIds.forEach(x -> {
                smallCrawlerSiteIds.add(x);
                if (smallCrawlerSiteIds.size() >= 500) {
                    try {
                        queryES(smallCrawlerSiteIds, size);
                        smallCrawlerSiteIds.clear();
                    } catch (IOException e) {
                        logger.error(e.getMessage());
                    }
                }
            });
            // Flush the remaining (fewer than 500) ids
            if (!smallCrawlerSiteIds.isEmpty()) {
                queryES(smallCrawlerSiteIds, size);
            }
        } else {
            queryES(crawlerSiteIds, size);
        }
    }
    private void queryES(List<EarlyWarningVo> crawlerSiteIds, int size) throws IOException {
        // Build the query body
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        // Time range: the last 6 hours
        RangeQueryBuilder rangeQueryBuilder = getRangeQueryBuilder();
        BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
        boolQueryBuilder.must(rangeQueryBuilder);
        // Group by site id, restricted to the monitored site ids
        String[] crawlerSiteIdString = crawlerSiteIds.stream()
                .map(EarlyWarningVo::getId)
                .toArray(String[]::new);
        // NOTE: the grouping field name "crawler_site_id" is assumed; adjust it to your index mapping
        TermsAggregationBuilder idGroup = AggregationBuilders.terms("idGroup")
                .field("crawler_site_id")
                .includeExclude(new IncludeExclude(crawlerSiteIdString, null))
                .size(size);
        searchSourceBuilder.query(boolQueryBuilder).aggregation(idGroup).size(0);
        // Build the search request
        SearchRequest searchRequest = new SearchRequest(Config.get(Config.INDEX_NAME) + "-all");
        searchRequest.source(searchSourceBuilder);
        logger.info("query dsl -> {}", searchSourceBuilder.toString());
        SearchResponse searchResponse = restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT);
        Map<String, Aggregation> aggMap = searchResponse.getAggregations().asMap();
        // Site ids that did produce documents in the window
        List<String> returnList = new ArrayList<>();
        ParsedStringTerms teamAgg = (ParsedStringTerms) aggMap.get("idGroup");
        for (Terms.Bucket bucket : teamAgg.getBuckets()) {
            returnList.add(bucket.getKeyAsString());
        }
        // Sites missing from the aggregation received no new data in the last 6 hours
        List<EarlyWarningVo> nullList = new ArrayList<>();
        crawlerSiteIds.forEach(x -> {
            if (!returnList.contains(x.getId())) {
                nullList.add(x);
            }
        });
        if (!nullList.isEmpty()) {
            sendWarning(nullList);
        }
    }
    private RangeQueryBuilder getRangeQueryBuilder() {
        // Window: [now - 6h, now], matching the scheduling period of the task
        Date date = new Date();
        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        String endTime = simpleDateFormat.format(date);
        calendar.add(Calendar.HOUR_OF_DAY, -6);
        String startTime = simpleDateFormat.format(calendar.getTime());
        return new RangeQueryBuilder("created_at")
                .from(startTime)
                .to(endTime)
                .includeUpper(true)
                .includeLower(true)
                .format("yyyy-MM-dd HH:mm:ss");
    }
    private void sendWarning(List<EarlyWarningVo> jsonObjectList) {
        HttpUtils.post(jsonObjectList);
    }
}
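A couple of notes on the class above. The cron expression 0 0 0/6 * * ? fires every 6 hours, which lines up with the 6-hour range query, so each run only inspects the window since the previous run. @Async("threadPoolTaskExecutor") assumes an executor bean with that name (and @EnableAsync) is configured elsewhere in the application; if it is not, the annotation can be dropped and the task will simply run on the scheduler thread.

The EarlyWarningVo class and the HttpUtils helpers are not shown in the original post. A minimal sketch of EarlyWarningVo, assuming it only needs the site id (as a string, so it can be compared with the aggregation keys) and a display name for the alert text:

public class EarlyWarningVo {
    // Site id as indexed in ES (assumed to be a string/keyword field)
    private String id;
    // Human-readable site name, used in the alert message via toString()
    private String name;

    public String getId() { return id; }
    public void setId(String id) { this.id = id; }
    public String getName() { return name; }
    public void setName(String name) { this.name = name; }

    @Override
    public String toString() {
        return name + "(" + id + ")";
    }
}

The push itself is done by HttpUtils.post, an OkHttp-based helper; its imports and logger are omitted in the excerpt below.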
public static void post(List<EarlyWarningVo> crawlerSiteIdList) {
    // Connect timeout
    OkHttpClient client = new OkHttpClient.Builder().connectTimeout(10, TimeUnit.SECONDS)
            // Read timeout
            .readTimeout(20, TimeUnit.SECONDS)
            .build();
    String url = "your webhook url";
    Response response = null;
    try {
        // Text-type message; the content value must stay valid JSON, hence the escaped quotes
        String reqBody = "{" +
                "\"msgtype\":\"text\"," +
                "\"text\":{" +
                "\"content\":\"Alert test !!! Sites with 0 ES ingestion in the last 6 hours:\\n" + crawlerSiteIdList.toString() + "\"," +
                "\"mentioned_list\":[\"@all\"]" +
                "}" +
                "}";
        MediaType contentType = MediaType.parse("application/json; charset=utf-8");
        RequestBody body = RequestBody.create(contentType, reqBody);
        Request request = new Request.Builder().url(url).post(body).addHeader("cache-control", "no-cache").build();
        response = client.newCall(request).execute();
        byte[] datas = response.body().bytes();
        String respMsg = new String(datas);
        logger.info(respMsg);
    } catch (IOException e) {
        logger.error(e.getMessage());
    } finally {
        if (response != null) {
            response.close();
        }
    }
}
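The webhook responds with a JSON body containing errcode and errmsg; a non-zero errcode means the push failed and is worth logging. Also, since fastjson is already on the classpath, the payload can be built with JSONObject instead of string concatenation, which avoids producing broken JSON when a site name contains quotes or line breaks. A sketch of that alternative (same text message layout as above):

import com.alibaba.fastjson.JSONObject;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

public class WarningPayloadBuilder {
    // Builds the same "text" message as the string-concatenation version,
    // but lets fastjson take care of JSON escaping.
    public static String build(List<EarlyWarningVo> sites) {
        String siteNames = sites.stream()
                .map(EarlyWarningVo::toString)
                .collect(Collectors.joining("\n"));
        JSONObject text = new JSONObject();
        text.put("content", "Alert: sites with 0 ES ingestion in the last 6 hours:\n" + siteNames);
        text.put("mentioned_list", Collections.singletonList("@all"));
        JSONObject payload = new JSONObject();
        payload.put("msgtype", "text");
        payload.put("text", text);
        return payload.toJSONString();
    }
}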
3. If you need to extend this further, see the WeChat Work group bot documentation:
https://developer.work.weixin.qq.com/document/path/91770#%E5%A6%82%E4%BD%95%E4%BD%BF%E7%94%A8%E7%BE%A4%E6%9C%BA%E5%99%A8%E4%BA%BA