Use of com.alibaba.otter.manager.biz.statistics.throughput.param.ThroughputInfo in project otter by alibaba.
The class ThroughputStatServiceImpl, method listRealtimeThroughput:
/**
 * Statistics for the three analysis time windows.
 */
public Map<AnalysisType, ThroughputInfo> listRealtimeThroughput(RealtimeThroughputCondition condition) {
    Assert.assertNotNull(condition);
    Map<AnalysisType, ThroughputInfo> throughputInfos = new HashMap<AnalysisType, ThroughputInfo>();
    TimelineThroughputCondition timelineCondition = new TimelineThroughputCondition();
    Date realtime = new Date(System.currentTimeMillis());
    timelineCondition.setPipelineId(condition.getPipelineId());
    timelineCondition.setType(condition.getType());
    // query the widest requested window once; the narrower windows are filtered from the same result set below
    timelineCondition.setStart(new Date(realtime.getTime() - condition.getMax() * 60 * 1000));
    timelineCondition.setEnd(realtime);
    List<ThroughputStatDO> throughputStatDOs = throughputDao.listTimelineThroughputStat(timelineCondition);
    for (AnalysisType analysisType : condition.getAnalysisType()) {
        ThroughputInfo throughputInfo = new ThroughputInfo();
        List<ThroughputStat> throughputStat = new ArrayList<ThroughputStat>();
        for (ThroughputStatDO throughputStatDO : throughputStatDOs) {
            // keep only the records whose end time falls inside this analysis window
            if (realtime.getTime() - throughputStatDO.getEndTime().getTime() <= analysisType.getValue() * 60 * 1000) {
                throughputStat.add(throughputStatDOToModel(throughputStatDO));
            }
        }
        throughputInfo.setItems(throughputStat);
        throughputInfo.setSeconds(analysisType.getValue() * 60L);
        throughputInfos.put(analysisType, throughputInfo);
    }
    return throughputInfos;
}
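A hedged usage sketch of a caller follows. The service reference throughputStatService and the pipeline id 42L are assumptions made purely for illustration, and ThroughputInfo.getSeconds() is assumed to mirror the setSeconds(...) call seen above.

// Hedged sketch: query the 1/5/15-minute realtime windows for ROW throughput on one pipeline.
RealtimeThroughputCondition condition = new RealtimeThroughputCondition();
condition.setPipelineId(42L);                       // hypothetical pipeline id
condition.setType(ThroughputType.ROW);
List<AnalysisType> windows = new ArrayList<AnalysisType>();
windows.add(AnalysisType.ONE_MINUTE);
windows.add(AnalysisType.FIVE_MINUTE);
windows.add(AnalysisType.FIFTEEN_MINUTE);
condition.setAnalysisType(windows);
Map<AnalysisType, ThroughputInfo> infos = throughputStatService.listRealtimeThroughput(condition);
for (Map.Entry<AnalysisType, ThroughputInfo> entry : infos.entrySet()) {
    // each window covers analysisType.getValue() minutes, as recorded via setSeconds(...) above
    System.out.println(entry.getKey() + " covers " + entry.getValue().getSeconds() + "s");
}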
Use of com.alibaba.otter.manager.biz.statistics.throughput.param.ThroughputInfo in project otter by alibaba.
The class ThroughputStatServiceImpl, method listTimelineThroughput:
/**
 * <pre>
 * Lists the throughputStat for the given pipelineId within the start-end time range.
 * All data for the period is first fetched from the database, sorted in descending order by end_time,
 * and then grouped into 1-minute buckets.
 * </pre>
 */
public Map<Long, ThroughputInfo> listTimelineThroughput(TimelineThroughputCondition condition) {
    Assert.assertNotNull(condition);
    Map<Long, ThroughputInfo> throughputInfos = new LinkedHashMap<Long, ThroughputInfo>();
    List<ThroughputStatDO> throughputStatDOs = throughputDao.listTimelineThroughputStat(condition);
    int size = throughputStatDOs.size();
    int k = size - 1;
    for (Long i = condition.getStart().getTime(); i <= condition.getEnd().getTime(); i += 60 * 1000) {
        ThroughputInfo throughputInfo = new ThroughputInfo();
        List<ThroughputStat> throughputStat = new ArrayList<ThroughputStat>();
        // collect the records belonging to the minute that ends at time point i;
        // k is a cursor, so records already consumed in earlier iterations are not traversed again
        for (int j = k; j >= 0; --j) {
            long delta = i - throughputStatDOs.get(j).getEndTime().getTime();
            if (delta <= 60 * 1000 && delta >= 0) {
                throughputStat.add(throughputStatDOToModel(throughputStatDOs.get(j)));
                k = j - 1;
            } else {
                // once a record falls outside the window, the remaining records cannot match either
                break;
            }
        }
        if (throughputStat.size() > 0) {
            throughputInfo.setItems(throughputStat);
            throughputInfo.setSeconds(1 * 60L);
            throughputInfos.put(i, throughputInfo);
        }
    }
    return throughputInfos;
}
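A hedged sketch of how the 1-minute buckets might be consumed; again throughputStatService and the pipeline id are assumptions, and ThroughputInfo.getNumber() is the accessor used by the actions further down this page. Only minutes with data appear in the returned map, as the size check above shows.

// Hedged sketch: per-minute ROW throughput for the last hour.
long now = System.currentTimeMillis() / 60000 * 60000;   // round down to a whole minute, as the actions below do
TimelineThroughputCondition condition = new TimelineThroughputCondition();
condition.setPipelineId(42L);                             // hypothetical pipeline id
condition.setType(ThroughputType.ROW);
condition.setStart(new Date(now - 60 * 60 * 1000L));
condition.setEnd(new Date(now));
Map<Long, ThroughputInfo> buckets = throughputStatService.listTimelineThroughput(condition);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
for (Map.Entry<Long, ThroughputInfo> bucket : buckets.entrySet()) {
    // the key is the end of a 1-minute bucket
    System.out.println(sdf.format(new Date(bucket.getKey())) + " -> " + bucket.getValue().getNumber() + " rows");
}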
Use of com.alibaba.otter.manager.biz.statistics.throughput.param.ThroughputInfo in project otter by alibaba.
The class AnalysisThroughputHistory, method execute:
public void execute(@Param("d5221") String startTime, @Param("d5222") String endTime,
                    @Param("pipelineId") Long pipelineId, HttpSession session, Context context) throws Exception {
    Date end = null;
    Date start = null;
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
    if (StringUtils.isEmpty(startTime) || StringUtils.isEmpty(endTime)) {
        // default to the last 24 hours, with times rounded down to the whole minute
        start = new Date(System.currentTimeMillis() / 60000 * 60000 - 24 * 60 * 60 * 1000);
        end = new Date(System.currentTimeMillis() / 60000 * 60000);
    } else {
        sdf.setLenient(false);
        if (null != startTime && null != endTime) {
            start = sdf.parse(startTime);
            end = sdf.parse(endTime);
        }
    }
    Channel channel = channelService.findByPipelineId(pipelineId);
    Map<Long, ThroughputInfo> throughputInfos1 = new LinkedHashMap<Long, ThroughputInfo>();
    Map<Long, ThroughputInfo> throughputInfos2 = new LinkedHashMap<Long, ThroughputInfo>();
    TimelineThroughputCondition condition1 = new TimelineThroughputCondition();
    TimelineThroughputCondition condition2 = new TimelineThroughputCondition();
    if (null != start && null != end) {
        condition1.setStart(start);
        condition1.setEnd(end);
        condition1.setType(ThroughputType.ROW);
        condition1.setPipelineId(pipelineId);
        condition2.setStart(start);
        condition2.setEnd(end);
        condition2.setType(ThroughputType.FILE);
        condition2.setPipelineId(pipelineId);
        throughputInfos1 = throughputStatService.listTimelineThroughput(condition1);
        throughputInfos2 = throughputStatService.listTimelineThroughput(condition2);
    }
    // aggregate totals across all 1-minute buckets
    Long totalRecord1 = 0L;
    Long totalRecord2 = 0L;
    Long totalSize1 = 0L;
    Long totalSize2 = 0L;
    for (ThroughputInfo info : throughputInfos1.values()) {
        totalRecord1 += info.getNumber();
        totalSize1 += info.getSize();
    }
    for (ThroughputInfo info : throughputInfos2.values()) {
        totalRecord2 += info.getNumber();
        totalSize2 += info.getSize();
    }
    context.put("throughputInfos1", throughputInfos1);
    context.put("throughputInfos2", throughputInfos2);
    context.put("totalRecord1", totalRecord1);
    context.put("totalRecord2", totalRecord2);
    context.put("totalSize1", totalSize1);
    context.put("totalSize2", totalSize2);
    context.put("channel", channel);
    context.put("pipelineId", pipelineId);
    context.put("start", sdf.format(start));
    context.put("end", sdf.format(end));
}
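The default branch truncates the current timestamp to a whole minute with integer division before subtracting 24 hours. A minimal sketch of that arithmetic, with an arbitrary example timestamp:

// Integer division by 60000 drops the seconds and milliseconds; multiplying back gives the start of the minute.
long millis = 1700000123456L;                          // arbitrary example timestamp
long wholeMinute = millis / 60000 * 60000;             // 1700000100000, i.e. 23456 ms earlier
long dayBefore = wholeMinute - 24 * 60 * 60 * 1000L;   // the same minute boundary, 24 hours earlier
System.out.println(new Date(dayBefore) + " .. " + new Date(wholeMinute));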
Use of com.alibaba.otter.manager.biz.statistics.throughput.param.ThroughputInfo in project otter by alibaba.
The class AnalysisThroughputStat, method execute:
public void execute(@Param("pipelineId") Long pipelineId, Context context) throws Exception {
    Channel channel = channelService.findByPipelineId(pipelineId);
    RealtimeThroughputCondition condition1 = new RealtimeThroughputCondition();
    RealtimeThroughputCondition condition2 = new RealtimeThroughputCondition();
    ThroughputCondition condition11 = new ThroughputCondition();
    ThroughputCondition condition22 = new ThroughputCondition();
    List<AnalysisType> analysisType = new ArrayList<AnalysisType>();
    analysisType.add(AnalysisType.ONE_MINUTE);
    analysisType.add(AnalysisType.FIVE_MINUTE);
    analysisType.add(AnalysisType.FIFTEEN_MINUTE);
    condition1.setPipelineId(pipelineId);
    condition1.setAnalysisType(analysisType);
    condition1.setType(ThroughputType.FILE);
    condition2.setPipelineId(pipelineId);
    condition2.setAnalysisType(analysisType);
    condition2.setType(ThroughputType.ROW);
    condition11.setPipelineId(pipelineId);
    condition11.setType(ThroughputType.FILE);
    condition22.setPipelineId(pipelineId);
    condition22.setType(ThroughputType.ROW);
    Map<AnalysisType, ThroughputInfo> throughputInfos1 = throughputStatService.listRealtimeThroughput(condition1);
    Map<AnalysisType, ThroughputInfo> throughputInfos2 = throughputStatService.listRealtimeThroughput(condition2);
    ThroughputStat throughputStat1 = throughputStatService.findThroughputStatByPipelineId(condition11);
    ThroughputStat throughputStat2 = throughputStatService.findThroughputStatByPipelineId(condition22);
    context.put("throughputInfos1", throughputInfos1);
    context.put("throughputInfos2", throughputInfos2);
    context.put("channel", channel);
    context.put("pipelineId", pipelineId);
    context.put("throughputStat1", throughputStat1);
    context.put("throughputStat2", throughputStat2);
    context.put("one", AnalysisType.ONE_MINUTE);
    context.put("five", AnalysisType.FIVE_MINUTE);
    context.put("fifteen", AnalysisType.FIFTEEN_MINUTE);
}
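The rendering layer can turn these context values into rates. A hedged sketch, written as if it continued the method above and assuming ThroughputInfo.getNumber(), getSize() and getSeconds() aggregate over the items of each window as the code above suggests; it is not part of otter itself:

// Hypothetical rendering-side helper: rows/s and file bytes/s per realtime window.
for (AnalysisType type : analysisType) {                 // the ONE/FIVE/FIFTEEN_MINUTE list built above
    ThroughputInfo rows  = throughputInfos2.get(type);   // condition2 requested ThroughputType.ROW
    ThroughputInfo files = throughputInfos1.get(type);   // condition1 requested ThroughputType.FILE
    if (rows != null && files != null) {
        System.out.println(type + ": " + rows.getNumber() / (double) rows.getSeconds() + " rows/s, "
                           + files.getSize() / (double) files.getSeconds() + " file bytes/s");
    }
}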