Use of com.att.aro.core.videoanalysis.pojo.VideoStream in project VideoOptimzer by attdevsupport.
The class VideoBufferPlot, method calculateBufferProgress.
/*
 * called by:
 *   -> VideoBufferPlot.VideoBufferPlot(AROTraceData, VideoStream)
 *   -> SegmentBufferGraphPanel.refresh(AROTraceData, VideoStream, JCheckBox, JCheckBox)
 */
public void calculateBufferProgress(AROTraceData aroTraceData) {
    bufferProgressSeries.clear();
    if (aroTraceData != null && aroTraceData.getAnalyzerResult().getStreamingVideoData() != null) {
        for (VideoStream videoStream : aroTraceData.getAnalyzerResult().getStreamingVideoData().getVideoStreams()) {
            if (videoStream.isSelected()) {
                LOGGER.debug("VideoStream :" + videoStream.getManifest().getVideoName());
                for (XYPair xy : videoStream.getPlayTimeList()) {
                    bufferProgressSeries.add(xy.getXVal(), xy.getYVal());
                }
            }
        }
    }
    minYValue = bufferProgressSeries.getMinY();
    maxYValue = bufferProgressSeries.getMaxY();
    minXValue = bufferProgressSeries.getMinX();
    maxXValue = bufferProgressSeries.getMaxX();
}
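The method above clears the series, repopulates it from each selected stream's play-time pairs, and then reads back the axis bounds. A minimal self-contained sketch of that pattern, assuming bufferProgressSeries is a JFreeChart org.jfree.data.xy.XYSeries (the pair values here are made up for illustration):

import org.jfree.data.xy.XYSeries;

public class BufferSeriesSketch {
    public static void main(String[] args) {
        // Hypothetical play-time pairs: x = trace time, y = buffer level
        double[][] playTimePairs = { { 1.2, 0.0 }, { 3.5, 4.0 }, { 6.1, 9.5 } };

        XYSeries bufferProgressSeries = new XYSeries("Buffer Progress");
        bufferProgressSeries.clear();
        for (double[] xy : playTimePairs) {
            bufferProgressSeries.add(xy[0], xy[1]);
        }

        // Axis bounds, mirroring the min/max reads in calculateBufferProgress()
        System.out.printf("x: [%.1f, %.1f]  y: [%.1f, %.1f]%n",
                bufferProgressSeries.getMinX(), bufferProgressSeries.getMaxX(),
                bufferProgressSeries.getMinY(), bufferProgressSeries.getMaxY());
    }
}

If no stream is selected, the series stays empty and XYSeries reports Double.NaN for these bounds.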
Use of com.att.aro.core.videoanalysis.pojo.VideoStream in project VideoOptimzer by attdevsupport.
The class SegmentTablePanel, method updateTitleButton.
public void updateTitleButton(AROTraceData analyzerResult) {
    if (titlePanel != null) {
        if (analyzerResult.getAnalyzerResult() != null) {
            streamingVideoData = analyzerResult.getAnalyzerResult().getStreamingVideoData();
            for (VideoStream manifest : streamingVideoData.getVideoStreamMap().values()) {
                if (manifest.equals(videoStream) && ((!videoStream.getVideoEventsBySegment().isEmpty())
                        && ((VideoEvent) videoStream.getVideoEventsBySegment().toArray()[0]).getSegmentID() >= 0)) {
                    videoStream.setSelected(manifest.isSelected());
                    enableCheckBox.setSelected(videoStream.isSelected());
                    streamingVideoData.setValidatedCount(false);
                    break;
                }
            }
            if (streamingVideoData.getValidatedCount()) {
                streamingVideoData.scanVideoStreams();
            }
        }
    }
}
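The core of updateTitleButton is keeping a Swing checkbox in sync with the selected flag on the matching stream. A stripped-down sketch of that synchronization, using a hypothetical StreamModel class in place of VideoStream:

import javax.swing.JCheckBox;

public class SelectionSyncSketch {
    // Hypothetical stand-in for a stream with a selected flag
    static class StreamModel {
        private boolean selected;
        boolean isSelected() { return selected; }
        void setSelected(boolean selected) { this.selected = selected; }
    }

    public static void main(String[] args) {
        StreamModel matchingStream = new StreamModel();
        matchingStream.setSelected(true);

        StreamModel videoStream = new StreamModel();
        JCheckBox enableCheckBox = new JCheckBox("Enable");

        // Mirror of the sync in updateTitleButton: copy the matching stream's flag,
        // then reflect it in the checkbox
        videoStream.setSelected(matchingStream.isSelected());
        enableCheckBox.setSelected(videoStream.isSelected());

        System.out.println("checkbox selected: " + enableCheckBox.isSelected());
    }
}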
Use of com.att.aro.core.videoanalysis.pojo.VideoStream in project VideoOptimzer by attdevsupport.
The class VideoManifestPanel, method updateGraphPanels.
public void updateGraphPanels(AROTraceData analyzerResult, Collection<VideoStream> videoStreamMap) {
    VideoTab videoTab = aroView.getVideoTab();
    if (videoStreamMap != null && videoStreamMap.size() > 0) {
        Iterator<VideoStream> videoStreamIterator = videoStreamMap.iterator();
        while (videoStreamIterator.hasNext()) {
            VideoStream videoStream = videoStreamIterator.next();
            SegmentThroughputGraphPanel throughputGraphPanel = videoTab.getThroughputGraphPanel();
            SegmentProgressGraphPanel progressGraphPanel = videoTab.getProgressGraphPanel();
            SegmentBufferGraphPanel bufferGraphPanel = videoTab.getBufferGraphPanel();
            if (videoStream.getVideoSegmentEventList().size() > 0 || videoStream.getAudioSegmentEventList().size() > 0) {
                throughputGraphPanel.refresh(analyzerResult, videoStream, null, null);
                progressGraphPanel.refresh(analyzerResult, videoStream, null, null);
                boolean isStartupDelaySet = (videoStream.getPlayRequestedTime() != null || videoStream.getVideoPlayBackTime() != null);
                if (isStartupDelaySet && videoStream.isCurrentStream()) {
                    bufferGraphPanel.refresh(analyzerResult, videoStream, null, null);
                }
                boolean isGraphVisible = videoStreamMap.size() == 1;
                toggleGraphPanels(videoTab, isGraphVisible, isStartupDelaySet);
            } else {
                toggleGraphPanels(videoTab, false, false);
            }
        }
    } else {
        toggleGraphPanels(videoTab, false, false);
    }
}
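The decision logic above boils down to two booleans per stream: whether any video or audio segments exist (refresh the throughput and progress panels) and whether a startup delay has been set (refresh the buffer panel for the current stream). A small sketch of that decision, with hypothetical flags standing in for the VideoStream getters:

public class GraphPanelDecisionSketch {
    // Hypothetical inputs standing in for the VideoStream getters used above
    static void decide(int videoSegments, int audioSegments, boolean playRequested,
            boolean playbackTimeSet, boolean isCurrentStream, int streamCount) {
        if (videoSegments > 0 || audioSegments > 0) {
            boolean isStartupDelaySet = playRequested || playbackTimeSet;
            boolean refreshBufferPanel = isStartupDelaySet && isCurrentStream;
            boolean isGraphVisible = streamCount == 1;
            System.out.printf("refreshBuffer=%b visible=%b startupDelay=%b%n",
                    refreshBufferPanel, isGraphVisible, isStartupDelaySet);
        } else {
            System.out.println("no segments: hide graph panels");
        }
    }

    public static void main(String[] args) {
        decide(42, 40, true, false, true, 1);  // single stream with a startup delay set
        decide(0, 0, false, false, false, 2);  // empty stream: panels hidden
    }
}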
Use of com.att.aro.core.videoanalysis.pojo.VideoStream in project VideoOptimzer by attdevsupport.
The class VideoNetworkComparisonImpl, method runTest.
@Override
public AbstractBestPracticeResult runTest(PacketAnalyzerResult tracedata) {
    BPResultType bpResultType = BPResultType.SELF_TEST;
    double avgKbps = 0.0;
    double avgBitRate = 0.0;
    double summaryBitRate = 0.0;
    VideoNetworkComparisonResult result = new VideoNetworkComparisonResult();
    init(result);
    SortedMap<Integer, SegmentComparison> qualityMap = new TreeMap<>();
    if ((streamingVideoData = tracedata.getStreamingVideoData()) != null
            && (videoStreamCollection = streamingVideoData.getVideoStreamMap()) != null
            && MapUtils.isNotEmpty(videoStreamCollection)) {
        selectedCount = streamingVideoData.getSelectedManifestCount();
        invalidCount = streamingVideoData.getInvalidManifestCount();
        List<VideoEvent> filteredVideoSegment = filterVideoSegment(streamingVideoData);
        if (selectedCount == 0) {
            if (invalidCount == videoStreamCollection.size()) {
                result.setResultText(invalidManifestsFound);
            } else if (invalidCount > 0) {
                result.setResultText(noManifestsSelectedMixed);
            } else {
                result.setResultText(noManifestsSelected);
            }
            bpResultType = BPResultType.CONFIG_REQUIRED;
            result.setResultExcelText(BPResultType.CONFIG_REQUIRED.getDescription());
            result.setSelfTest(false);
        } else if (selectedCount > 1) {
            bpResultType = BPResultType.CONFIG_REQUIRED;
            result.setResultText(multipleManifestsSelected);
            result.setResultExcelText(BPResultType.CONFIG_REQUIRED.getDescription());
            result.setSelfTest(false);
        } else {
            SegmentComparison segmentComparison;
            for (VideoStream videoStream : videoStreamCollection.values()) {
                if (videoStream.isSelected() && MapUtils.isNotEmpty(videoStream.getVideoEventMap())) {
                    for (VideoEvent videoEvent : videoStream.getVideoEventMap().values()) {
                        if (videoEvent.isNormalSegment() && videoEvent.isSelected()) {
                            Integer track = StringParse.stringToDouble(videoEvent.getQuality(), 0).intValue();
                            double endTS = videoEvent.getEndTS();
                            double startTS = videoEvent.getStartTS();
                            double durationInMilliseconds = endTS - startTS;
                            double throughput = 0.0;
                            if (durationInMilliseconds > 0) {
                                throughput = (videoEvent.getTotalBytes() * 8) / durationInMilliseconds;
                            }
                            if ((segmentComparison = qualityMap.get(track)) != null) {
                                int count = segmentComparison.getCount();
                                segmentComparison.setCount(++count);
                                segmentComparison.getCalculatedThroughputList().add(throughput);
                            } else {
                                List<Double> throughputs = new ArrayList<Double>();
                                throughputs.add(throughput);
                                segmentComparison = new SegmentComparison(
                                        videoEvent.getManifest().getVideoName(),
                                        1, // count
                                        track,
                                        videoEvent.getChildManifest().getBandwidth() / 1000.0, // declaredBitrate (kbps)
                                        throughputs);
                                qualityMap.put(track, segmentComparison);
                            }
                        }
                    }
                }
            }
            result.setResults(qualityMap);
            avgBitRate = getAvgBitRate(summaryBitRate, filteredVideoSegment);
            avgKbps = getAvgThroughput(tracedata);
            result.setAvgBitRate(avgBitRate);
            result.setAvgKbps(avgKbps);
            result.setSelfTest(true);
            bpResultType = BPResultType.SELF_TEST;
            result.setResultText(MessageFormat.format(textResults, avgKbps, avgBitRate));
            result.setResultExcelText(MessageFormat.format(textExcelResults, BPResultType.SELF_TEST.getDescription(), avgKbps, avgBitRate));
        }
    } else {
        result.setResultText(noData);
        result.setResultExcelText(BPResultType.NO_DATA.getDescription());
        bpResultType = BPResultType.NO_DATA;
    }
    result.setResultType(bpResultType);
    return result;
}
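The per-segment throughput above is computed as bits divided by download duration; as the variable name durationInMilliseconds suggests, the timestamps are treated as milliseconds, so bits per millisecond comes out directly in kbps. A worked sketch of that arithmetic (segment size and timestamps are made up):

public class SegmentThroughputSketch {
    // Same formula as runTest(): (totalBytes * 8) / duration, guarded against zero duration
    static double throughputKbps(double totalBytes, double startTS, double endTS) {
        double durationInMilliseconds = endTS - startTS;
        return durationInMilliseconds > 0 ? (totalBytes * 8) / durationInMilliseconds : 0.0;
    }

    public static void main(String[] args) {
        // 500,000 bytes downloaded between t=10,000 ms and t=12,000 ms
        // -> 4,000,000 bits / 2,000 ms = 2,000 kbps
        System.out.println(throughputKbps(500_000, 10_000, 12_000));
    }
}

The declared bitrate it is compared against comes from the child manifest's bandwidth, divided by 1000 to land in the same kbps unit.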
Use of com.att.aro.core.videoanalysis.pojo.VideoStream in project VideoOptimzer by attdevsupport.
The class VideoResolutionQualityImpl, method scanManifestsForHeight.
/**
 * Sets values on maxHeightUsed and overSizeCount
 *
 * @param videoStreamCollection
 */
private void scanManifestsForHeight(SortedMap<Double, VideoStream> videoStreamCollection) {
    maxHeightUsed = 0;
    overSizeCount = 0;
    for (VideoStream videoStream : videoStreamCollection.values()) {
        if (videoStream.isSelected()) {
            for (VideoEvent videoEvent : videoStream.getVideoEventMap().values()) {
                double height = videoEvent.getResolutionHeight();
                if (height > maxHeightUsed) {
                    maxHeightUsed = height;
                }
                if (height > MAX_HEIGHT) {
                    overSizeCount++;
                }
            }
            break;
        }
    }
}
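The scan keeps a running maximum of the resolution heights and counts how many segments exceed MAX_HEIGHT; the break means only the first selected stream contributes. A self-contained sketch, using a hypothetical MAX_HEIGHT of 720 (the real constant is defined elsewhere in VideoResolutionQualityImpl):

public class HeightScanSketch {
    public static void main(String[] args) {
        final double MAX_HEIGHT = 720; // hypothetical threshold for illustration
        double[] segmentHeights = { 480, 720, 1080, 1080 }; // hypothetical resolution heights

        double maxHeightUsed = 0;
        int overSizeCount = 0;
        for (double height : segmentHeights) {
            if (height > maxHeightUsed) {
                maxHeightUsed = height;
            }
            if (height > MAX_HEIGHT) {
                overSizeCount++;
            }
        }
        System.out.println("maxHeightUsed=" + maxHeightUsed + ", overSizeCount=" + overSizeCount);
    }
}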