Usage of com.alibaba.otter.manager.biz.statistics.throughput.param.TimelineThroughputCondition in project otter by alibaba:
the listRealtimeThroughput method of the ThroughputStatServiceImpl class.
/**
 * Realtime throughput statistics aggregated over the condition's analysis
 * intervals (three interval lengths, e.g. 1/5/15 minutes).
 *
 * Issues a single timeline query covering the largest lookback
 * ({@code condition.getMax()} minutes back from "now"), then buckets the
 * returned rows into one {@link ThroughputInfo} per {@link AnalysisType}
 * instead of querying once per interval.
 *
 * @param condition pipeline id, throughput type, requested analysis
 *                  intervals and the maximum lookback in minutes; must not
 *                  be null
 * @return one ThroughputInfo per requested AnalysisType, keyed by interval
 */
public Map<AnalysisType, ThroughputInfo> listRealtimeThroughput(RealtimeThroughputCondition condition) {
    Assert.assertNotNull(condition);
    Map<AnalysisType, ThroughputInfo> throughputInfos = new HashMap<AnalysisType, ThroughputInfo>();
    TimelineThroughputCondition timelineCondition = new TimelineThroughputCondition();
    Date realtime = new Date(System.currentTimeMillis());
    timelineCondition.setPipelineId(condition.getPipelineId());
    timelineCondition.setType(condition.getType());
    // 1000L forces long arithmetic: the previous int product (max * 60 * 1000)
    // overflowed for lookbacks beyond Integer.MAX_VALUE millis (~35791 min).
    timelineCondition.setStart(new Date(realtime.getTime() - condition.getMax() * 60 * 1000L));
    timelineCondition.setEnd(realtime);
    List<ThroughputStatDO> throughputStatDOs = throughputDao.listTimelineThroughputStat(timelineCondition);
    for (AnalysisType analysisType : condition.getAnalysisType()) {
        ThroughputInfo throughputInfo = new ThroughputInfo();
        List<ThroughputStat> throughputStat = new ArrayList<ThroughputStat>();
        for (ThroughputStatDO throughputStatDO : throughputStatDOs) {
            // Keep only rows whose end time falls inside this interval's
            // window; long literal avoids the same int-overflow hazard.
            if (realtime.getTime() - throughputStatDO.getEndTime().getTime() <= analysisType.getValue() * 60 * 1000L) {
                throughputStat.add(throughputStatDOToModel(throughputStatDO));
            }
        }
        throughputInfo.setItems(throughputStat);
        throughputInfo.setSeconds(analysisType.getValue() * 60L);
        throughputInfos.put(analysisType, throughputInfo);
    }
    return throughputInfos;
}
Usage of com.alibaba.otter.manager.biz.statistics.throughput.param.TimelineThroughputCondition in project otter by alibaba:
the execute method of the AnalysisThroughputHistory class.
/**
 * Builds the throughput-history view for a pipeline over a time window.
 *
 * Loads ROW and FILE timeline throughput for the window, sums record and
 * size totals for each type, and publishes everything to the template
 * context.
 *
 * @param startTime  window start, "yyyy-MM-dd HH:mm"; blank selects the
 *                   default window
 * @param endTime    window end, same format; blank selects the default window
 * @param pipelineId pipeline whose throughput is reported
 * @param session    unused; bound by the web framework
 * @param context    template context receiving the computed attributes
 * @throws Exception if startTime/endTime cannot be parsed (strict parsing)
 */
public void execute(@Param("d5221") String startTime, @Param("d5222") String endTime, @Param("pipelineId") Long pipelineId, HttpSession session, Context context) throws Exception {
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
    Date start = null;
    Date end = null;
    if (StringUtils.isEmpty(startTime) || StringUtils.isEmpty(endTime)) {
        // Default window: the last 24 hours, truncated to whole minutes.
        // Sample the clock once so start/end are exactly 24h apart even if
        // execution straddles a minute boundary.
        long nowMinute = System.currentTimeMillis() / 60000 * 60000;
        start = new Date(nowMinute - 24 * 60 * 60 * 1000L);
        end = new Date(nowMinute);
    } else {
        // Strict parsing: reject malformed dates instead of rolling them over.
        // (The old inner null-check was dead code — both strings are known
        // non-empty in this branch.)
        sdf.setLenient(false);
        start = sdf.parse(startTime);
        end = sdf.parse(endTime);
    }
    Channel channel = channelService.findByPipelineId(pipelineId);
    // start/end are always non-null here: either defaulted above or parse()
    // succeeded (it throws on failure), so no guard is needed.
    TimelineThroughputCondition condition1 = new TimelineThroughputCondition();
    condition1.setStart(start);
    condition1.setEnd(end);
    condition1.setType(ThroughputType.ROW);
    condition1.setPipelineId(pipelineId);
    TimelineThroughputCondition condition2 = new TimelineThroughputCondition();
    condition2.setStart(start);
    condition2.setEnd(end);
    condition2.setType(ThroughputType.FILE);
    condition2.setPipelineId(pipelineId);
    Map<Long, ThroughputInfo> throughputInfos1 = throughputStatService.listTimelineThroughput(condition1);
    Map<Long, ThroughputInfo> throughputInfos2 = throughputStatService.listTimelineThroughput(condition2);
    // Aggregate record and size totals per throughput type.
    Long totalRecord1 = 0L;
    Long totalRecord2 = 0L;
    Long totalSize1 = 0L;
    Long totalSize2 = 0L;
    for (ThroughputInfo info : throughputInfos1.values()) {
        totalRecord1 += info.getNumber();
        totalSize1 += info.getSize();
    }
    for (ThroughputInfo info : throughputInfos2.values()) {
        totalRecord2 += info.getNumber();
        totalSize2 += info.getSize();
    }
    context.put("throughputInfos1", throughputInfos1);
    context.put("throughputInfos2", throughputInfos2);
    context.put("totalRecord1", totalRecord1);
    context.put("totalRecord2", totalRecord2);
    context.put("totalSize1", totalSize1);
    context.put("totalSize2", totalSize2);
    context.put("channel", channel);
    context.put("pipelineId", pipelineId);
    context.put("start", sdf.format(start));
    context.put("end", sdf.format(end));
}
Aggregations