Use of com.att.aro.core.packetanalysis.pojo.VideoStall in project VideoOptimzer by attdevsupport — class BPVideoStallTablePanel, method getContentTable:
@SuppressWarnings("unchecked")
public DataTable<VideoStall> getContentTable() {
	// Lazily build the video-stall table the first time it is requested.
	if (contentTable == null) {
		contentTable = new DataTable<VideoStall>(tableModel);
		contentTable.setName(ResourceBundleHelper.getMessageString("video.stalls.tableName"));
		contentTable.setAutoCreateRowSorter(true);
		contentTable.setGridColor(Color.LIGHT_GRAY);
		contentTable.setRowHeight(ROW_HEIGHT);
		contentTable.setAutoResizeMode(JTable.AUTO_RESIZE_SUBSEQUENT_COLUMNS);
		// Install an explicit sorter so numeric columns sort numerically
		// instead of lexicographically (this replaces the auto-created sorter).
		TableRowSorter<TableModel> sorter = new TableRowSorter<>(tableModel);
		contentTable.setRowSorter(sorter);
		sorter.setComparator(VideoStallTableModel.COL_2, Util.getIntSorter());
		sorter.setComparator(VideoStallTableModel.COL_3, Util.getFloatSorter());
		sorter.setComparator(VideoStallTableModel.COL_4, Util.getFloatSorter());
		sorter.setComparator(VideoStallTableModel.COL_5, Util.getFloatSorter());
		// BUGFIX: default sort column previously referenced
		// SimultnsConnTableModel.COL_1 (copy-paste from the simultaneous
		// connections panel); this table's columns are defined by
		// VideoStallTableModel.
		sorter.toggleSortOrder(VideoStallTableModel.COL_1);
		DataTablePopupMenu popupMenu = (DataTablePopupMenu) contentTable.getPopup();
		popupMenu.initialize();
	}
	return contentTable;
}
Use of com.att.aro.core.packetanalysis.pojo.VideoStall in project VideoOptimzer by attdevsupport — class VideoStallImpl, method runTest:
@Override
public AbstractBestPracticeResult runTest(PacketAnalyzerResult tracedata) {
	// Evaluates the "video stalls" best practice: counts stalls longer than
	// the configured trigger time and grades the trace PASS/WARNING/FAIL,
	// or CONFIG_REQUIRED / NO_DATA when prerequisites are missing.
	List<VideoStall> videoStallResult = tracedata.getVideoStalls();
	List<VideoStall> stallResult = new ArrayList<VideoStall>();
	result = new VideoStallResult();
	init(result);
	if ((streamingVideoData = tracedata.getStreamingVideoData()) != null && (videoStreamCollection = streamingVideoData.getVideoStreamMap()) != null && MapUtils.isNotEmpty(videoStreamCollection)) {
		bpResultType = BPResultType.CONFIG_REQUIRED;
		result.setResultExcelText(BPResultType.CONFIG_REQUIRED.getDescription());
		selectedManifestCount = streamingVideoData.getSelectedManifestCount();
		hasSelectedManifest = (selectedManifestCount > 0);
		invalidCount = streamingVideoData.getInvalidManifestCount();
		if (selectedManifestCount == 0) {
			// No manifest selected: report why (all invalid / mixed / none chosen).
			if (invalidCount == videoStreamCollection.size()) {
				result.setResultText(invalidManifestsFound);
			} else if (invalidCount > 0) {
				result.setResultText(noManifestsSelectedMixed);
			} else {
				result.setResultText(noManifestsSelected);
			}
		} else if (selectedManifestCount > 1) {
			result.setResultText(multipleManifestsSelected);
		} else if (hasSelectedManifest) {
			// Exactly one manifest selected: run the actual stall analysis.
			bpResultType = BPResultType.SELF_TEST;
			double stallTriggerTime = videoPref.getVideoUsagePreference().getStallTriggerTime();
			int stallCount = 0;
			if (videoStallResult != null) {
				// Only stalls at least as long as the trigger threshold count.
				for (VideoStall stall : videoStallResult) {
					if (stall.getDuration() >= stallTriggerTime) {
						stallCount++;
						stallResult.add(updateStallResult(stall));
					}
				}
			}
			// warningCount/failCount/passCount are accumulated by
			// updateStallResult(); FAIL dominates WARNING dominates PASS.
			int count = 0;
			if (warningCount != 0 || failCount != 0) {
				if (failCount > 0) {
					bpResultType = BPResultType.FAIL;
					count = failCount;
				} else if (warningCount > 0) {
					bpResultType = BPResultType.WARNING;
					count = warningCount;
				}
			} else {
				bpResultType = BPResultType.PASS;
				count = passCount;
			}
			double startupDelay = videoPref.getVideoUsagePreference().getStartupDelay();
			if (MapUtils.isEmpty(streamingVideoData.getStreamingVideoCompiled().getChunkPlayTimeList())) {
				// Meaning startup delay is not set yet
				bpResultType = BPResultType.CONFIG_REQUIRED;
				result.setResultText(MessageFormat.format(startUpDelayNotSet, startupDelay, startupDelay == 1 ? "" : "s"));
				result.setResultExcelText(BPResultType.CONFIG_REQUIRED.getDescription());
			} else {
				result.setResultText(MessageFormat.format(this.textResults, stallCount, stallCount == 1 ? "" : "s", count == 1 ? "was" : "were", count, count == 1 ? "" : "s", bpResultType.toString().toLowerCase()));
				switch(bpResultType) {
				case PASS:
					result.setResultExcelText(BPResultType.PASS.getDescription());
					break;
				case SELF_TEST:
					result.setResultExcelText(BPResultType.SELF_TEST.getDescription());
					// BUGFIX: this case previously fell through to
					// WARNING/FAIL, immediately overwriting the SELF_TEST
					// text just set above (dead statement).
					break;
				case WARNING:
				case FAIL:
					result.setResultExcelText(MessageFormat.format(textExcelResults, bpResultType.getDescription(), stallCount));
					break;
				default:
					break;
				}
			}
			result.setVideoStallResult(stallCount);
			result.setResults(stallResult);
		}
	} else {
		// No streaming video data at all in the trace.
		result.setResultText(noData);
		bpResultType = BPResultType.NO_DATA;
		result.setResultExcelText(BPResultType.NO_DATA.getDescription());
	}
	result.setResultType(bpResultType);
	return result;
}
Use of com.att.aro.core.packetanalysis.pojo.VideoStall in project VideoOptimzer by attdevsupport — class ARODiagnosticsOverviewRouteImpl, method updateDiagnosticsTab:
@Override
public void updateDiagnosticsTab(Object routeInfo) {
	// Routes a best-practice table row object to the Diagnostics tab,
	// highlighting the matching request/response or session. Unknown types
	// (and null) restore the previously selected tab and log an error.
	final int previousIndex = jtabbedPane.getSelectedIndex();
	jtabbedPane.setSelectedIndex(DIAGNOSTIC_INDEX);
	DiagnosticsTab diagnostics = (DiagnosticsTab) jtabbedPane.getSelectedComponent();
	if (routeInfo == null) {
		jtabbedPane.setSelectedIndex(previousIndex);
		LOG.error("Diagnostics Tab needs a type for updating");
		return;
	}
	LOG.debug("Type used to route to Diagnostics Tab: " + routeInfo.getClass().getSimpleName());
	if (routeInfo instanceof CacheEntry) {
		CacheEntry cacheEntry = (CacheEntry) routeInfo;
		diagnostics.setHighlightedTCP(cacheEntry.getHttpRequestResponse());
	} else if (routeInfo instanceof Session) {
		diagnostics.setHighlightedTCP((Session) routeInfo);
	} else if (routeInfo instanceof HttpRequestResponseInfo) {
		diagnostics.setHighlightedTCP((HttpRequestResponseInfo) routeInfo);
	} else if (routeInfo instanceof HttpEntry) {
		HttpEntry httpEntry = (HttpEntry) routeInfo;
		diagnostics.setHighlightedTCP(httpEntry.getHttpRequestResponse());
	} else if (routeInfo instanceof DisplayNoneInCSSEntry) {
		DisplayNoneInCSSEntry cssEntry = (DisplayNoneInCSSEntry) routeInfo;
		diagnostics.setHighlightedTCP(cssEntry.getHttpRequestResponse());
	} else if (routeInfo instanceof ImageMdataEntry) {
		ImageMdataEntry mdataEntry = (ImageMdataEntry) routeInfo;
		diagnostics.setHighlightedTCP(mdataEntry.getHttpRequestResponse());
	} else if (routeInfo instanceof ImageCompressionEntry) {
		ImageCompressionEntry compressionEntry = (ImageCompressionEntry) routeInfo;
		diagnostics.setHighlightedTCP(compressionEntry.getHttpRequestResponse());
	} else if (routeInfo instanceof MultipleConnectionsEntry) {
		MultipleConnectionsEntry multiEntry = (MultipleConnectionsEntry) routeInfo;
		if (multiEntry.isMultiple()) {
			// Multi-connection rows route to the Waterfall tab instead.
			jtabbedPane.setSelectedIndex(WATERFALL_INDEX);
			WaterfallTab waterfallTab = (WaterfallTab) jtabbedPane.getSelectedComponent();
			waterfallTab.updateGraph(multiEntry.getHttpReqRespInfo());
		} else if (multiEntry.getHttpReqRespInfo().getSession() != null) {
			diagnostics.setHighlightedSessionTCP(multiEntry.getHttpReqRespInfo());
		} else {
			diagnostics.setHighlightedTCP(multiEntry.getHttpReqRespInfo());
		}
	} else if (routeInfo instanceof SpriteImageEntry) {
		SpriteImageEntry spriteEntry = (SpriteImageEntry) routeInfo;
		diagnostics.setHighlightedTCP(spriteEntry.getHttpRequestResponse());
	} else if (routeInfo instanceof UnnecessaryConnectionEntry) {
		UnnecessaryConnectionEntry unConnectionEntry = (UnnecessaryConnectionEntry) routeInfo;
		diagnostics.setHighlightedTCP(unConnectionEntry.getLowTime());
	} else if (routeInfo instanceof TransmissionPrivateDataEntry || routeInfo instanceof UnsecureSSLVersionEntry || routeInfo instanceof ForwardSecrecyEntry) {
		diagnostics.setHighlightedTCP(routeInfo);
	} else if (routeInfo instanceof VideoStall) {
		// For a stall, jump the graph and the player to the stalled segment.
		double timestamp = ((VideoStall) routeInfo).getSegmentTryingToPlay().getStartTS();
		diagnostics.getGraphPanel().setGraphView(timestamp, true);
		diagnostics.getVideoPlayer().setMediaTime(timestamp);
	} else {
		jtabbedPane.setSelectedIndex(previousIndex);
		LOG.error("Diagnostics Tab cannot handle a type of " + routeInfo.getClass().getSimpleName() + " for updating");
	}
}
Use of com.att.aro.core.packetanalysis.pojo.VideoStall in project VideoOptimzer by attdevsupport — class VideoStallResult, method hashCode:
@Override
public int hashCode() {
	// Standard 31-multiplier hash over the stall count, each stall in order,
	// and the best-practice description/type (mirrors equals()).
	final int prime = 31;
	int hash = prime + videoStalls; // folds the initial seed of 1: 31 * 1 + videoStalls
	for (VideoStall stall : videoStallList) {
		hash = prime * hash + stall.hashCode();
	}
	hash = prime * hash + getBestPracticeDescription().hashCode();
	hash = prime * hash + getBestPracticeType().hashCode();
	return hash;
}
Use of com.att.aro.core.packetanalysis.pojo.VideoStall in project VideoOptimzer by attdevsupport — class VideoSegmentAnalyzer, method syncWithAudio:
/**
 * <pre>
 * Scan through all audio events related to videoEvent, starting with the audio
 * event from just before the videoEvent. Records all audio segments associated
 * with the video segment, including partial overlaps — audio and video segments
 * often do not start at the same time. When a selected audio segment finishes
 * downloading after its computed play time, a VideoStall is created, stall time
 * is recorded on both the audio and video events, and the running
 * totalStallOffset is advanced.
 *
 * @param startupOffset  playback startup offset applied to segment play times
 * @param videoStream    contains collections of Video, Audio and Captioning
 * @param audioStreamMap contains all audio in videoStream (when non-muxed)
 *                       &lt;key definition: segmentStartTime, endTS(in
 *                       milliseconds)&gt;
 * @param videoEvent     the video segment to receive audio linkage
 * @return the last selected audioEvent overlapping the video segment, or null
 *         if none was found
 */
private VideoEvent syncWithAudio(double startupOffset, VideoStream videoStream, TreeMap<String, VideoEvent> audioStreamMap, VideoEvent videoEvent) {
	VideoEvent audioEvent = null;
	// Key range: from the audio entry just before the video segment starts
	// to the entry just after it ends (keys are timestamp-formatted strings).
	String segmentStartTime = VideoStream.generateTimestampKey(videoEvent.getSegmentStartTime());
	String segmentEndTime = VideoStream.generateTimestampKey(videoEvent.getSegmentStartTime() + videoEvent.getDuration());
	String audioKeyStart = null;
	String audioKeyEnd = null;
	try {
		audioKeyStart = audioStreamMap.lowerKey(segmentStartTime);
		audioKeyEnd = audioStreamMap.higherKey(segmentEndTime);
		String key = audioKeyStart;
		// NOTE(review): lowerKey/higherKey may return null; a null key here
		// triggers an NPE below that is absorbed by the broad catch — this
		// appears to double as loop termination. Confirm before refactoring.
		while (!key.equals(audioKeyEnd)) {
			VideoEvent lastAudioEvent = audioEvent;
			VideoEvent tempEvent = audioStreamMap.get(key);
			if (tempEvent.isSelected()) {
				audioEvent = tempEvent;
				calcAudioTime(videoEvent, audioEvent);
				// When the last byte of the audio segment arrives after the
				// moment it should play, playback stalls.
				double audioPlaytime = audioEvent.getSegmentStartTime() + startupOffset + totalStallOffset;
				if (audioEvent.getDLLastTimestamp() > audioPlaytime) {
					// NOTE(review): NPE risk — lastAudioEvent is null if the
					// first selected segment in range stalls; the broad catch
					// below would swallow it. TODO confirm intended.
					double stallPoint = lastAudioEvent.getSegmentStartTime() + audioEvent.getDuration() - videoPrefs.getStallPausePoint();
					// NOTE(review): the next two assignments are immediately
					// overwritten; kept as-is in case calcSegmentStallOffset
					// has side effects — verify and simplify if it does not.
					stallOffset = audioEvent.getDLLastTimestamp() - audioEvent.getPlayTime() + getStallRecovery();
					stallOffset = calcSegmentStallOffset(startupOffset, audioEvent, totalStallOffset);
					stallOffset = audioEvent.getDLLastTimestamp() - audioPlaytime + getStallRecovery();
					audioEvent.setStallTime(stallOffset);
					videoEvent.setStallTime(stallOffset);
					totalStallOffset += stallOffset;
					videoStall = new VideoStall(stallPoint);
					videoStall.setSegmentTryingToPlay(audioEvent);
					// NOTE(review): the first setStallEndTimestamp call is
					// overwritten by the second (resumePoint) — verify and
					// remove the dead call if confirmed.
					videoStall.setStallEndTimestamp(audioEvent.getPlayTime());
					double resumePoint = audioEvent.getDLLastTimestamp() + getStallRecovery();
					videoStall.setStallEndTimestamp(resumePoint);
					stalls.add(videoStall);
				}
			}
			// advance to next segmentStartTime
			key = audioStreamMap.higherKey(StringUtils.substringBefore(key, ":") + "z");
		}
	} catch (Exception e) {
		// NOTE(review): broad catch + printStackTrace hides real failures as
		// well as the (apparently expected) null-key termination above.
		e.printStackTrace();
	}
	return audioEvent;
}
Aggregations