Use of com.att.aro.core.videoanalysis.pojo.StreamingVideoData in project VideoOptimzer by attdevsupport:
the GraphPanel class, method refresh.
// In 4.1.1, the method name is resetChart(TraceData.Analysis analysis)
/**
 * Rebuilds the graph panel from a trace analysis result.
 * <p>
 * Enables/disables the toolbar buttons, restores the crosshair position,
 * resets the time-axis range (honoring the analysis filter's time range when
 * one is set), and re-populates every sub-plot present in the sub-plot map.
 * In 4.1.1 this method was named resetChart(TraceData.Analysis analysis).
 *
 * @param aroTraceData the analyzed trace to display; null clears the panel
 */
public void refresh(AROTraceData aroTraceData) {
	getSaveGraphButton().setEnabled(aroTraceData != null);
	// Keep the current crosshair position across the refresh when a combined plot exists.
	if (combinedPlot != null) {
		setGraphView(combinedPlot.getDomainCrosshairValue(), true);
	} else {
		setGraphView(0, true);
	}
	setTraceData(aroTraceData);
	if (aroTraceData != null) {
		setAllPackets(aroTraceData.getAnalyzerResult().getTraceresult().getAllpackets());
		setTraceDuration(aroTraceData.getAnalyzerResult().getTraceresult().getTraceDuration());
		// list
		setAllTcpSessions(aroTraceData.getAnalyzerResult().getSessionlist().size());
		// length
	} else {
		// No trace loaded: clear packet list, duration, and session count.
		setAllPackets(new LinkedList<PacketInfo>());
		setTraceDuration(0);
		setAllTcpSessions(0);
	}
	// Axis range selection: prefer the filter's time range when present and valid.
	if (aroTraceData != null && aroTraceData.getAnalyzerResult().getFilter() != null && aroTraceData.getAnalyzerResult().getFilter().getTimeRange() != null) {
		if (aroTraceData.getAnalyzerResult().getSessionlist().size() > 0 && aroTraceData.getAnalyzerResult().getFilter().getTimeRange().getBeginTime() < aroTraceData.getAnalyzerResult().getFilter().getTimeRange().getEndTime()) {
			getAxis().setRange(new Range(aroTraceData.getAnalyzerResult().getFilter().getTimeRange().getBeginTime(), aroTraceData.getAnalyzerResult().getFilter().getTimeRange().getEndTime()));
		} else {
			// Degenerate or empty filter range: collapse the axis to a tiny span.
			getAxis().setRange(new Range(-0.01, 0));
		}
	} else {
		if (getEndTime() > 0) {
			if (aroTraceData != null) {
				getAxis().setRange(new Range(getStartTime(), getEndTime()));
			}
			// Reset times
			setStartTime(0.0);
			setEndTime(0.0);
		} else {
			// No explicit range: span the whole trace, or the default timeline when no trace.
			getAxis().setRange(new Range(-0.01, aroTraceData != null ? aroTraceData.getAnalyzerResult().getTraceresult().getTraceDuration() : DEFAULT_TIMELINE));
		}
	}
	// Re-populate each configured sub-plot. Plot helper objects are created
	// lazily on first use and then reused across refreshes.
	if (aroTraceData != null && aroTraceData.getAnalyzerResult().getSessionlist().size() > 0) {
		for (Map.Entry<ChartPlotOptions, GraphPanelPlotLabels> entry : getSubplotMap().entrySet()) {
			switch(entry.getKey()) {
			case THROUGHPUT:
				if (throughput == null) {
					throughput = new ThroughputPlot();
				}
				throughput.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case BURSTS:
				if (burstPlot == null) {
					burstPlot = new BurstPlot();
				}
				// burstPlot = new BurstPlot();
				burstPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case RRC:
				if (rrcPlot == null) {
					rrcPlot = new RrcPlot();
				}
				rrcPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case USER_INPUT:
				if (eventPlot == null) {
					eventPlot = new UserEventPlot();
				}
				eventPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case DL_PACKETS:
				if (dlPlot == null) {
					dlPlot = new DLPacketPlot();
				}
				// true selects the downlink direction of the packet plot.
				dlPlot.populate(entry.getValue().getPlot(), aroTraceData, true);
				break;
			case UL_PACKETS:
				if (upPlot == null) {
					// Uplink reuses DLPacketPlot; the boolean flag selects the direction.
					upPlot = new DLPacketPlot();
				}
				upPlot.populate(entry.getValue().getPlot(), aroTraceData, false);
				break;
			case ALARM:
				if (alarmPlot == null) {
					alarmPlot = new AlarmPlot();
				}
				alarmPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case GPS:
				if (gpsPlot == null) {
					gpsPlot = new GpsPlot();
				}
				gpsPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case RADIO:
				if (radioPlot == null) {
					radioPlot = new RadioPlot();
				}
				radioPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case CPU:
				if (cpuPlot == null) {
					cpuPlot = new CpuPlot();
				}
				cpuPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case SCREEN:
				if (ssPlot == null) {
					ssPlot = new ScreenStatePlot();
				}
				ssPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case BATTERY:
				if (bPlot == null) {
					bPlot = new BatteryPlot();
				}
				bPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case TEMPERATURE:
				if (tPlot == null) {
					tPlot = new TemperaturePlot();
				}
				tPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case BLUETOOTH:
				if (bluetoothPlot == null) {
					bluetoothPlot = new BluetoothPlot();
				}
				bluetoothPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case WIFI:
				if (wPlot == null) {
					wPlot = new WifiPlot();
				}
				wPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case CAMERA:
				if (cPlot == null) {
					cPlot = new CameraPlot();
				}
				cPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case NETWORK_TYPE:
				if (ntPlot == null) {
					ntPlot = new NetworkTypePlot();
				}
				ntPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case WAKELOCK:
				if (wlPlot == null) {
					wlPlot = new WakeLockPlot();
				}
				wlPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case ATTENUATION:
				if (attnrPlot == null) {
					attnrPlot = new AttenuatorPlot();
				}
				attnrPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case SPEED_THROTTLE:
				if (stPlot == null) {
					stPlot = new SpeedThrottlePlot();
				}
				stPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case VIDEO_CHUNKS:
				if (vcPlot == null) {
					vcPlot = new VideoChunksPlot();
				}
				// The chunks plot also drives the two buffer sub-plots.
				XYPlot bufferOccupancyPlot = getSubplotMap().get(ChartPlotOptions.BUFFER_OCCUPANCY).getPlot();
				XYPlot bufferTimePlot = getSubplotMap().get(ChartPlotOptions.BUFFER_TIME_OCCUPANCY).getPlot();
				this.chunkInfo.clear();
				vcPlot.setBufferOccupancyPlot(bufferOccupancyPlot);
				vcPlot.setBufferTimePlot(bufferTimePlot);
				VideoStream selectedStream = null;
				int count = 0;
				StreamingVideoData streamingVideoData = aroTraceData.getAnalyzerResult().getStreamingVideoData();
				if (streamingVideoData != null) {
					// Count the selected streams; the refreshPlot() fast path applies only
					// when exactly one stream is selected and it has a nonzero startup delay.
					for (VideoStream videoStream : streamingVideoData.getVideoStreamMap().values()) {
						if (videoStream != null && videoStream.isSelected()) {
							selectedStream = videoStream;
							count++;
						}
					}
					if (count == 1 && selectedStream != null && selectedStream.getManifest().getDelay() != 0) {
						VideoEvent firstSegment = (VideoEvent) selectedStream.getVideoEventsBySegment().toArray()[0];
						if (selectedStream.getManifest().getVideoFormat() == VideoFormat.MPEG4) {
							// For MPEG4, skip segment 0 when choosing the first real segment
							// — presumably an initialization segment; TODO confirm.
							for (VideoEvent video : selectedStream.getVideoEventsBySegment()) {
								if (video.getSegmentID() != 0) {
									firstSegment = video;
									break;
								}
							}
						}
						vcPlot.refreshPlot(getSubplotMap().get(ChartPlotOptions.VIDEO_CHUNKS).getPlot(), aroTraceData, selectedStream.getManifest().getDelay() + firstSegment.getEndTS(), firstSegment);
					} else {
						vcPlot.populate(entry.getValue().getPlot(), aroTraceData);
					}
				}
				break;
			case CONNECTIONS:
				// NOTE(review): unlike every other plot here, this one is re-created on
				// each refresh instead of being lazily created and reused — confirm intentional.
				connectionsPlot = new ConnectionsPlot();
				connectionsPlot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			case LATENCY:
				if (latencyplot == null) {
					latencyplot = new LatencyPlot();
				}
				latencyplot.populate(entry.getValue().getPlot(), aroTraceData);
				break;
			default:
				break;
			}
		}
	}
	getZoomInButton().setEnabled(aroTraceData != null);
	getZoomOutButton().setEnabled(aroTraceData != null);
	getSaveGraphButton().setEnabled(aroTraceData != null);
	if (aroTraceData != null) {
		parent.getDeviceNetworkProfilePanel().refresh(aroTraceData);
	}
}
Use of com.att.aro.core.videoanalysis.pojo.StreamingVideoData in project VideoOptimzer by attdevsupport:
the SegmentTablePanel class, method refreshParent.
// refreshments
/**
 * Re-scans the video streams of the current analysis (if any) and asks the
 * video tab to redraw itself from that analysis without a full re-analysis.
 */
protected void refreshParent() {
	final StreamingVideoData videoData = analyzerResult.getAnalyzerResult().getStreamingVideoData();
	if (videoData != null) {
		// Refresh per-stream selection/aggregation state before redrawing.
		videoData.scanVideoStreams();
	}
	MainFrame mainFrame = (MainFrame) aroView;
	mainFrame.getVideoTab().refreshLocal(analyzerResult, false);
}
Use of com.att.aro.core.videoanalysis.pojo.StreamingVideoData in project VideoOptimzer by attdevsupport:
the VideoSegmentAnalyzer class, method process.
/**
 * Applies the recorded startup-delay data to every video stream of a trace.
 * <p>
 * Streams are visited in descending key order of the stream map. A stream
 * whose startup delay cannot be located, or whose manifest name does not match
 * the recorded startup data, is skipped; a stream with no events is marked
 * invalid/deselected with zero duration.
 *
 * @param result             the trace result; only TraceDirectoryResult is processed
 * @param streamingVideoData container of the video streams to process
 */
public void process(AbstractTraceResult result, StreamingVideoData streamingVideoData) {
	if (result instanceof TraceDirectoryResult) {
		videoStreamStartupData = ((TraceDirectoryResult) result).getVideoStartupData();
		this.videoPrefs = videoUsagePrefsManager.getVideoUsagePreference();
		if (!CollectionUtils.isEmpty(streamingVideoData.getVideoStreamMap())) {
			NavigableMap<Double, VideoStream> reverseVideoStreamMap = streamingVideoData.getVideoStreamMap().descendingMap();
			for (VideoStream videoStream : reverseVideoStreamMap.values()) {
				if (!CollectionUtils.isEmpty(videoStream.getVideoEventMap())) {
					if ((videoStreamStartup = locateStartupDelay(result, videoStream)) == null) {
						// StartupDelay could not be set, usually an invalid Stream
						continue;
					}
					// videoStreamStartup is non-null here (the former redundant null
					// check was removed); skip streams whose startup data belongs to
					// a different manifest.
					if (!videoStream.getManifest().getVideoName().equals(videoStreamStartup.getManifestName())) {
						continue;
					}
					double startupDelay = videoStreamStartup.getStartupTime();
					VideoEvent chosenEvent = videoStream.getVideoEventBySegment(videoStreamStartup.getFirstSegID());
					if (videoStreamStartup.getUserEvent() != null) {
						videoStream.setPlayRequestedTime(videoStreamStartup.getUserEvent().getPressTime());
					}
					duplicateHandling = videoPrefs.getDuplicateHandling();
					LOG.debug(String.format("Stream RQ:%10.3f", videoStream.getManifest().getRequestTime()));
					applyStartupDelayToStream(startupDelay, chosenEvent, videoStream, streamingVideoData);
					// Stream duration = sum of durations of selected, normal segments.
					videoStream.setDuration(videoStream.getVideoEventMap().entrySet().stream().filter(f -> f.getValue().isSelected() && f.getValue().isNormalSegment()).mapToDouble(x -> x.getValue().getDuration()).sum());
				} else {
					// No events at all: mark the stream empty and invalid.
					videoStream.setDuration(0);
					videoStream.setSelected(false);
					videoStream.setValid(false);
				}
			}
		}
	}
}
Use of com.att.aro.core.videoanalysis.pojo.StreamingVideoData in project VideoOptimzer by attdevsupport:
the VideoTrafficInferencer class, method inferVideoData.
/**
 * Infers streaming-video data (CSI analysis) from a trace plus a manifest file.
 * <p>
 * Parses the manifest by extension (json / mpd / m3u8), builds a candidate
 * segment list from the HTTP requests, runs a shortest-path selection over the
 * candidates, and materializes the winning segments (plus inferred audio
 * segments) into a VideoStream.
 * <p>
 * Fixes applied in review:
 * <ul>
 *   <li>Added the missing {@code break} after the "mpd" case — it previously
 *       fell through into "m3u8" and re-parsed the MPD file as an HLS manifest.</li>
 *   <li>Replaced {@code findFirst().get()} with {@code findFirst().orElse(null)} —
 *       {@code get()} throws NoSuchElementException when no audio track exists,
 *       which made the subsequent null check unreachable.</li>
 * </ul>
 *
 * @param result           trace result supplying the trace directory
 * @param sessionlist      sessions used to build the request map
 * @param manifestFilePath path of the manifest file (or the trace directory for saved CSI state)
 * @return the inferred StreamingVideoData (possibly empty on prior CSI failure)
 */
public StreamingVideoData inferVideoData(AbstractTraceResult result, List<Session> sessionlist, String manifestFilePath) {
	videoRequestMap = new HashSet<>();
	nonSegmentRequestMap = new HashMap<>();
	possibleAudioRequestMap = new TreeMap<>();
	streamingVideoData = new StreamingVideoData(result.getTraceDirectory());
	boolean flag = false;
	File manifestFile;
	if (result.getTraceDirectory().equals(manifestFilePath)) {
		// Re-running on a trace directory: load the previously saved CSI state.
		CSIManifestAndState csiState = csiDataHelper.readData(manifestFilePath + System.getProperty("file.separator") + "CSI");
		if (csiState.getAnalysisState().equals("Fail")) {
			return streamingVideoData;
		}
		manifestFile = csiDataHelper.generateManifestPath(manifestFilePath, csiState.getManifestFileName());
	} else {
		// Fresh manifest supplied by the user; remember to persist CSI state at the end.
		flag = true;
		manifestFile = new File(manifestFilePath);
	}
	byte[] fileContent;
	VideoManifest videoManifest = new VideoManifest();
	List<Track> tracks = new ArrayList<>();
	String fileExtName = FilenameUtils.getExtension(manifestFile.getPath());
	requestMap = generateRequestMap(sessionlist);
	if (manifestFile.exists() && fileExtName != null) {
		switch(fileExtName) {
		case "json":
			ObjectMapper mapper = new ObjectMapper();
			mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
			try {
				videoManifest = mapper.readValue(manifestFile, VideoManifest.class);
			} catch (IOException ioe) {
				LOGGER.error("Exception while parsing Manifest JSON for CSI", ioe);
			}
			break;
		case "mpd":
			try {
				fileContent = Files.readAllBytes(manifestFile.toPath());
				ManifestBuilderDASH manifestBuilderDASH = new ManifestBuilderDASH();
				manifestBuilderDASH.create(requestMap.values().iterator().next(), fileContent, "blank");
				// Convert each DASH child manifest into a Track with its segment sizes/durations.
				for (ChildManifest cManifest : manifestBuilderDASH.getManifestCollection().getSegmentChildManifestListInOrder()) {
					Track sTrack = new Track();
					List<Integer> segmentSizes = new ArrayList<Integer>();
					List<Double> segmentDurations = new ArrayList<Double>();
					sTrack.setMediaType(cManifest.isVideo() ? MediaType.VIDEO : MediaType.AUDIO);
					sTrack.setMediaBandwidth((float) cManifest.getBandwidth());
					cManifest.getSegmentInfoTrie().values().forEach((segment) -> segmentSizes.add(segment.getSize()));
					cManifest.getSegmentInfoTrie().values().forEach((segment) -> segmentDurations.add(segment.getDuration()));
					sTrack.setSegmentSizes(segmentSizes);
					sTrack.setSegmentDurations(segmentDurations);
					tracks.add(sTrack);
				}
			} catch (IOException ioe) {
				LOGGER.error("Exception while parsing MPD for CSI", ioe);
			}
			videoManifest.setTracks(tracks);
			break; // FIX: previously fell through into the "m3u8" case
		case "m3u8":
			try {
				videoManifest = hlsManifestParseImpl.getManifest(videoManifest, manifestFile);
			} catch (Exception e) {
				LOGGER.error(e.getMessage());
			}
			break;
		default:
			break;
		}
	}
	List<Segment> candidateList = prepareCandidates(requestMap, videoManifest);
	Map<Integer, List<Segment>> edgeDistanceMap = calculateDistancesAndPopulateAdjacencyList(candidateList, requestMap);
	TreeMap<Integer, List<Integer>> lastNodes = findShortestPath(candidateList, edgeDistanceMap);
	if (!lastNodes.isEmpty()) {
		VideoStream videoStream = new VideoStream();
		videoStream.setManifest(createManifest(FilenameUtils.getBaseName(manifestFile.getPath()), ManifestType.MASTER, ContentType.MUXED));
		streamingVideoData.addVideoStream(firstRequestTimeStamp, videoStream);
		List<Segment> solution = getLikelySequences(candidateList, edgeDistanceMap, lastNodes);
		Manifest manifest;
		if (!solution.isEmpty()) {
			// Materialize each chosen segment as a VideoEvent on the stream.
			for (Segment segment : solution) {
				manifest = createManifest(FilenameUtils.getBaseName(manifestFile.getPath()), ManifestType.CHILD, ContentType.VIDEO);
				ChildManifest childManifest = new ChildManifest();
				childManifest.setManifest(manifest);
				VideoEvent videoEvent = new VideoEvent(getDefaultThumbnail(), manifest, segment, requestMap.get(segment.getRequestKey()));
				videoRequestMap.add(requestMap.get(segment.getRequestKey()));
				videoEvent.setChildManifest(childManifest);
				videoStream.addVideoEvent(videoEvent);
			}
			int segmentIndex = 0;
			// FIX: findFirst().get() threw NoSuchElementException when no audio
			// track was present; orElse(null) restores the intended null check.
			Track audioTrack = videoManifest.getTracks().stream()
					.filter(track -> MediaType.AUDIO.equals(track.getMediaType()))
					.findFirst().orElse(null);
			if (audioTrack != null) {
				// Attach any non-video requests after the first video segment as audio segments.
				for (HttpRequestResponseInfo rrInfo : possibleAudioRequestMap.values()) {
					if (!videoRequestMap.contains(rrInfo) && rrInfo.getTime() > videoStream.getFirstSegment().getDLTime()) {
						Segment audioSegment = new Segment(videoManifest, videoManifest.getAudioTrack(), ++segmentIndex, audioTrack.getSegmentSizes().get(segmentIndex - 1), rrInfo.getKey(), rrInfo.getRequestCounterCSI(), -1);
						manifest = createManifest(FilenameUtils.getBaseName(manifestFile.getPath()), ManifestType.CHILD, ContentType.AUDIO);
						ChildManifest childManifest = new ChildManifest();
						childManifest.setManifest(manifest);
						VideoEvent videoEvent = new VideoEvent(getDefaultThumbnail(), manifest, audioSegment, rrInfo);
						videoEvent.setChildManifest(childManifest);
						videoStream.addVideoEvent(videoEvent);
					}
				}
			}
		}
	}
	if (flag) {
		saveCSIManifestAndState(manifestFile.toString());
	}
	videoSegmentAnalyzer.process(result, streamingVideoData);
	return streamingVideoData;
}
Use of com.att.aro.core.videoanalysis.pojo.StreamingVideoData in project VideoOptimzer by attdevsupport:
the VideoChunksPlot class, method populate.
/**
 * Populates the video-chunks plot (and its startup-delay / stall overlays)
 * from the trace's streaming video data.
 * <p>
 * Fixes applied in review: guards against a null StreamingVideoData (the
 * previous code fetched it and then dereferenced it unconditionally, throwing
 * NPE for traces without video analysis); reuses the already-fetched local
 * instead of re-calling getStreamingVideoData(); and removes the dead
 * {@code first} counter, whose {@code first == 0} test sat inside a
 * run-at-most-once if-block and was therefore always true.
 *
 * @param plot      the XYPlot to fill with chunk and overlay datasets
 * @param traceData the analyzed trace; null leaves existing datasets in place
 */
@Override
public void populate(XYPlot plot, AROTraceData traceData) {
	if (traceData != null) {
		StreamingVideoData streamingVideoData = traceData.getAnalyzerResult().getStreamingVideoData();
		if (streamingVideoData != null) {
			if (!isReDraw) {
				// Full redraw: clear the two buffer sub-plots and their cached results.
				bufferOccupancyPlot.clearPlot(this.bufferOccupancyXYPlot);
				bufferInSecondsPlot.clearPlot(this.bufferTimeXYPlot);
				traceData.getAnalyzerResult().setBufferTimeResult(null);
				traceData.getAnalyzerResult().setBufferOccupancyResult(null);
			}
			videoChunksData.removeAllSeries();
			for (XYSeriesCollection seriesColl : startUpDelayCollection) {
				seriesColl.removeAllSeries();
			}
			startUpDelayCollection.clear();
			imgSeries = new ArrayList<BufferedImage>();
			// create the dataset...
			int index = 0;
			series = new XYSeries("Chunks");
			seriesDataSets = new TreeMap<>();
			seriesDataSets = videoChunkPlotter.populateDataSet(streamingVideoData);
			imgSeries = videoChunkPlotter.getImgSeries();
			filteredChunks = streamingVideoData.getStreamingVideoCompiled().getFilteredSegments();
			segmentsToBePlayed.clear();
			for (VideoEvent ve : streamingVideoData.getStreamingVideoCompiled().getAllSegments()) {
				segmentsToBePlayed.add(ve);
			}
			// One zero-height point per chunk download timestamp.
			for (double timeStamp : seriesDataSets.values()) {
				series.add(timeStamp, 0);
			}
			XYSeriesCollection playTimeStartSeries = new XYSeriesCollection();
			List<VideoEvent> chunkPlayBackTimeList = new ArrayList<VideoEvent>(chunkPlayTime.keySet());
			Collections.sort(chunkPlayBackTimeList, new VideoEventComparator(SortSelection.SEGMENT_ID));
			if (CollectionUtils.isNotEmpty(chunkPlayBackTimeList)) {
				// Startup-delay overlay: a horizontal series from the first chunk's
				// download timestamp to its play time.
				VideoEvent ve = chunkPlayBackTimeList.get(0);
				seriesStartUpDelay = new XYSeries("StartUpDelay" + (index++));
				seriesStartUpDelay.add(ve.getDLTimeStamp(), 0);
				Double playTime = chunkPlayTime.get(ve);
				if (playTime != null) {
					seriesStartUpDelay.add((double) playTime, 0);
				}
				SortedMap<Double, VideoStream> videoEventList = streamingVideoData.getVideoStreamMap();
				Double segPlayTime = chunkPlayTime.get(ve);
				if (segPlayTime != null) {
					setDelayVideoStream((double) segPlayTime - ve.getEndTS(), videoEventList.values());
				}
				playTimeStartSeries.addSeries(seriesStartUpDelay);
				startUpDelayCollection.add(playTimeStartSeries);
			}
			// Stall overlays: one series per stall interval in each selected stream.
			for (VideoStream videoStream : streamingVideoData.getVideoStreams()) {
				if (videoStream.isSelected()) {
					for (VideoStall videoStall : videoStream.getVideoStallList()) {
						playTimeStartSeries = new XYSeriesCollection();
						seriesStartUpDelay = new XYSeries("StartUpDelay" + (index++));
						seriesStartUpDelay.add(videoStall.getStallStartTimestamp(), 0);
						seriesStartUpDelay.add(videoStall.getStallEndTimestamp(), 0);
						playTimeStartSeries.addSeries(seriesStartUpDelay);
						startUpDelayCollection.add(playTimeStartSeries);
					}
				}
			}
			videoChunksData.addSeries(series);
			// Startup and stalls
			VideoChunkImageRenderer renderer = new VideoChunkImageRenderer();
			XYLineAndShapeRenderer rendererDelay = new XYLineAndShapeRenderer();
			for (int idx = 0; idx < startUpDelayCollection.size(); idx++) {
				rendererDelay.setSeriesStroke(idx, new BasicStroke(2.0f));
				rendererDelay.setSeriesPaint(idx, Color.RED);
			}
			renderer.setBaseToolTipGenerator(toolTipGenerator());
			renderer.setSeriesShape(0, shape);
			plot.setRenderer(index, renderer);
			for (int i = 0; i < startUpDelayCollection.size(); i++) {
				plot.setRenderer(i, rendererDelay);
			}
		}
	}
	isReDraw = false;
	// Install the overlay datasets first, then the chunk dataset at the next index.
	int seriesIndex = 0;
	for (XYSeriesCollection seriesColl : startUpDelayCollection) {
		plot.setDataset(seriesIndex, seriesColl);
		seriesIndex++;
	}
	plot.setDataset(seriesIndex, videoChunksData);
}
Aggregations