Use of com.att.aro.core.videoanalysis.pojo.VideoStream in project VideoOptimzer by attdevsupport.
Class VideoSegmentAnalyzer, method applyStartupDelayToStream:
/**
 * Applies a startup delay to the given stream. When no startup event has been
 * chosen, the stream's first segment is promoted to that role. Play times are
 * propagated from the chosen event, the manifest's delay fields are updated,
 * and the processed stream becomes the single selected/current stream among
 * all streams held by streamingVideoData.
 *
 * @param startupTime        playback start time to apply
 * @param chosenVideoEvent   segment chosen as the startup event; may be null
 * @param videoStream        stream receiving the startup delay
 * @param streamingVideoData container of all detected video streams
 */
public void applyStartupDelayToStream(double startupTime, VideoEvent chosenVideoEvent, VideoStream videoStream, StreamingVideoData streamingVideoData) {
	if (chosenVideoEvent == null) {
		// Fall back to the stream's first segment as the startup event.
		chosenVideoEvent = videoStream.getFirstSegment();
		if (chosenVideoEvent != null) {
			chosenVideoEvent.setSelected(true);
			videoStream.getManifest().setStartupVideoEvent(chosenVideoEvent);
		}
	}
	if (chosenVideoEvent != null) {
		propagatePlaytime(startupTime, chosenVideoEvent, videoStream);
		chosenVideoEvent.setPlayTime(startupTime);
		videoStream.setVideoPlayBackTime(startupTime);
		videoStream.getManifest().setDelay(startupTime - chosenVideoEvent.getEndTS());
		videoStream.getManifest().setStartupVideoEvent(chosenVideoEvent);
		videoStream.getManifest().setStartupDelay(chosenVideoEvent.getSegmentStartTime() - videoStream.getManifest().getRequestTime());
	}
	streamingVideoData.scanVideoStreams();
	// Exactly one stream — the one just processed — ends up selected/current.
	for (VideoStream candidate : streamingVideoData.getVideoStreamMap().values()) {
		boolean isChosen = candidate.equals(videoStream);
		candidate.setSelected(isChosen);
		candidate.setCurrentStream(isChosen);
	}
}
Use of com.att.aro.core.videoanalysis.pojo.VideoStream in project VideoOptimzer by attdevsupport.
Class VideoSegmentAnalyzer, method generateByteBufferData:
/**
 * Scans a VideoStream's video and audio event maps to produce/populate its
 * byte-buffer occupancy data (VideoStream.byteBufferList, built up through
 * addByteBufferPoints) and the associated tooltip details.
 *
 * @param videoStream the stream whose buffer-occupancy data is (re)generated
 */
private void generateByteBufferData(VideoStream videoStream) {
VideoEvent eventPlay = null;
// Running byte count of buffered-but-unplayed media
Double buffer = 0D;
VideoEvent eventDL;
// Play time of the next segment due to be played out (0 == none queued)
double timeKey = 0D;
this.videoStream = videoStream;
videoStream.clearBufferOccupancyData();
// Segments downloaded but not yet "played", keyed by play time
TreeMap<Double, VideoEvent> mergedPlayMap = new TreeMap<>();
// All successful (non-failed) video + audio events, keyed by event-map key
TreeMap<String, VideoEvent> mergedMap = new TreeMap<>();
videoStream.getVideoEventMap().entrySet().stream().filter((f) -> !f.getValue().isFailedRequest()).forEach(e -> {
mergedMap.put(e.getKey(), e.getValue());
});
videoStream.getAudioEventMap().entrySet().stream().filter((f) -> !f.getValue().isFailedRequest()).forEach(e -> {
mergedMap.put(e.getKey(), e.getValue());
});
byteBufferList = videoStream.getByteBufferList();
for (String key : mergedMap.keySet()) {
eventDL = mergedMap.get(key);
if (eventDL.isNormalSegment()) {
// Before crediting this download, debit every queued segment whose
// play time falls at or before this download's end timestamp.
if (timeKey > 0 && (timeKey < eventDL.getEndTS())) {
eventPlay = mergedPlayMap.get(timeKey);
while (eventPlay != null && eventPlay.getPlayTime() <= eventDL.getEndTS()) {
mergedPlayMap.remove(eventPlay.getPlayTime());
// Negative size: the buffer shrinks when a segment is played out
buffer = addByteBufferPoints(buffer, eventPlay, eventPlay.getPlayTime(), -eventPlay.getSize());
timeKey = mergedPlayMap.isEmpty() ? 0 : mergedPlayMap.firstKey();
eventPlay = mergedPlayMap.isEmpty() ? null : mergedPlayMap.get(mergedPlayMap.firstKey());
}
}
// Queue this segment for playback and credit its bytes at download end
mergedPlayMap.put(eventDL.getPlayTime(), eventDL);
timeKey = mergedPlayMap.firstKey();
buffer = addByteBufferPoints(buffer, eventDL, eventDL.getEndTS(), eventDL.getSize());
}
}
// Drain whatever remains queued after the final download has completed
timeKey = mergedPlayMap.isEmpty() ? 0 : mergedPlayMap.firstKey();
while (!mergedPlayMap.isEmpty()) {
eventPlay = mergedPlayMap.remove(timeKey);
buffer = addByteBufferPoints(buffer, eventPlay, eventPlay.getPlayTime(), -eventPlay.getSize());
timeKey = mergedPlayMap.isEmpty() ? 0 : mergedPlayMap.firstKey();
// NOTE(review): this assignment is overwritten at the top of the next
// iteration and never read — it appears to be dead code; confirm before removing.
eventPlay = mergedPlayMap.isEmpty() ? null : mergedPlayMap.get(mergedPlayMap.firstKey());
}
}
Use of com.att.aro.core.videoanalysis.pojo.VideoStream in project VideoOptimzer by attdevsupport.
Class VideoSegmentAnalyzer, method process:
/**
 * Processes every detected VideoStream in a trace-directory result: locates each
 * stream's startup delay, applies it, and computes the stream's selected duration.
 * Streams without video events are marked unselected/invalid with zero duration.
 *
 * @param result             trace results; only TraceDirectoryResult is handled
 * @param streamingVideoData container of all detected video streams
 */
public void process(AbstractTraceResult result, StreamingVideoData streamingVideoData) {
	if (result instanceof TraceDirectoryResult) {
		videoStreamStartupData = ((TraceDirectoryResult) result).getVideoStartupData();
		this.videoPrefs = videoUsagePrefsManager.getVideoUsagePreference();
		if (!CollectionUtils.isEmpty(streamingVideoData.getVideoStreamMap())) {
			// Iterate newest-first over the streams
			NavigableMap<Double, VideoStream> reverseVideoStreamMap = streamingVideoData.getVideoStreamMap().descendingMap();
			for (VideoStream videoStream : reverseVideoStreamMap.values()) {
				if (!CollectionUtils.isEmpty(videoStream.getVideoEventMap())) {
					if ((videoStreamStartup = locateStartupDelay(result, videoStream)) == null) {
						// StartupDelay could not be set, usually an invalid Stream
						continue;
					}
					// videoStreamStartup is guaranteed non-null here (the former
					// redundant null re-check was removed); skip streams whose
					// startup record belongs to a different manifest.
					if (!videoStream.getManifest().getVideoName().equals(videoStreamStartup.getManifestName())) {
						continue;
					}
					double startupDelay = videoStreamStartup.getStartupTime();
					VideoEvent chosenEvent = videoStream.getVideoEventBySegment(videoStreamStartup.getFirstSegID());
					if (videoStreamStartup.getUserEvent() != null) {
						videoStream.setPlayRequestedTime(videoStreamStartup.getUserEvent().getPressTime());
					}
					duplicateHandling = videoPrefs.getDuplicateHandling();
					LOG.debug(String.format("Stream RQ:%10.3f", videoStream.getManifest().getRequestTime()));
					applyStartupDelayToStream(startupDelay, chosenEvent, videoStream, streamingVideoData);
					// Duration = sum of durations of selected, normal video segments
					videoStream.setDuration(videoStream.getVideoEventMap().entrySet().stream().filter(f -> f.getValue().isSelected() && f.getValue().isNormalSegment()).mapToDouble(x -> x.getValue().getDuration()).sum());
				} else {
					// No video events: mark the stream invalid and deselected
					videoStream.setDuration(0);
					videoStream.setSelected(false);
					videoStream.setValid(false);
				}
			}
		}
	}
}
Use of com.att.aro.core.videoanalysis.pojo.VideoStream in project VideoOptimzer by attdevsupport.
Class BufferOccupancyCalculatorImpl, method initialize:
/**
 * Gathers the download chunks from all selected streams (skipping segment ID 0),
 * mirrors them into the play list, sorts both working lists by segment ID, and
 * hands off to runInit.
 *
 * @param streamingVideoData container of all detected video streams
 */
private void initialize(StreamingVideoData streamingVideoData) {
	filteredSegments = streamingVideoData.getStreamingVideoCompiled().getFilteredSegments();
	for (VideoStream videoStream : streamingVideoData.getVideoStreamMap().values()) {
		if (videoStream.isSelected()) {
			for (VideoEvent ve : videoStream.getVideoEventMap().values()) {
				// Segment 0 is excluded (e.g. an init/moov segment, not playable media)
				// NOTE(review): confirm segment 0's meaning against VideoEvent docs
				if (ve.getSegmentID() != 0) {
					chunkDownload.add(ve);
				}
			}
		}
	}
	// Rebuild the play list as a copy of the download list
	// (idiomatic addAll replaces the former manual element-by-element loop).
	chunkPlay.clear();
	chunkPlay.addAll(chunkDownload);
	Collections.sort(chunkPlay, new VideoEventComparator(SortSelection.SEGMENT_ID));
	Collections.sort(filteredSegments, new VideoEventComparator(SortSelection.SEGMENT_ID));
	runInit(streamingVideoData, filteredSegments);
}
Use of com.att.aro.core.videoanalysis.pojo.VideoStream in project VideoOptimzer by attdevsupport.
Class VideoTrafficInferencer, method inferVideoData:
/**
 * Infers a StreamingVideoData model (CSI — manifest-assisted inference) from the
 * trace sessions and a manifest file (json, mpd, or m3u8). Builds candidate
 * segments from the request map, finds the most likely segment sequence via
 * shortest path, and populates a VideoStream with video (and, when an audio
 * track exists, audio) events before running the segment analyzer.
 *
 * @param result           trace results (supplies the trace directory)
 * @param sessionlist      sessions used to build the request map
 * @param manifestFilePath path to the manifest, or the trace directory itself
 *                         when the manifest should be loaded from saved CSI state
 * @return the populated StreamingVideoData (empty when CSI state is "Fail")
 */
public StreamingVideoData inferVideoData(AbstractTraceResult result, List<Session> sessionlist, String manifestFilePath) {
	videoRequestMap = new HashSet<>();
	nonSegmentRequestMap = new HashMap<>();
	possibleAudioRequestMap = new TreeMap<>();
	streamingVideoData = new StreamingVideoData(result.getTraceDirectory());
	// true when the manifest was supplied explicitly (not restored from CSI state),
	// in which case the CSI manifest/state is saved at the end
	boolean flag = false;
	File manifestFile;
	if (result.getTraceDirectory().equals(manifestFilePath)) {
		CSIManifestAndState csiState = csiDataHelper.readData(manifestFilePath + System.getProperty("file.separator") + "CSI");
		if (csiState.getAnalysisState().equals("Fail")) {
			// Prior CSI analysis failed; nothing to infer
			return streamingVideoData;
		}
		manifestFile = csiDataHelper.generateManifestPath(manifestFilePath, csiState.getManifestFileName());
	} else {
		flag = true;
		manifestFile = new File(manifestFilePath);
	}
	byte[] fileContent;
	VideoManifest videoManifest = new VideoManifest();
	List<Track> tracks = new ArrayList<>();
	String fileExtName = FilenameUtils.getExtension(manifestFile.getPath());
	requestMap = generateRequestMap(sessionlist);
	if (manifestFile.exists() && fileExtName != null) {
		switch (fileExtName) {
		case "json":
			ObjectMapper mapper = new ObjectMapper();
			mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
			try {
				videoManifest = mapper.readValue(manifestFile, VideoManifest.class);
			} catch (IOException ioe) {
				LOGGER.error("Exception while parsing Manifest JSON for CSI", ioe);
			}
			break;
		case "mpd":
			try {
				fileContent = Files.readAllBytes(manifestFile.toPath());
				ManifestBuilderDASH manifestBuilderDASH = new ManifestBuilderDASH();
				manifestBuilderDASH.create(requestMap.values().iterator().next(), fileContent, "blank");
				for (ChildManifest cManifest : manifestBuilderDASH.getManifestCollection().getSegmentChildManifestListInOrder()) {
					Track sTrack = new Track();
					List<Integer> segmentSizes = new ArrayList<>();
					List<Double> segmentDurations = new ArrayList<>();
					sTrack.setMediaType(cManifest.isVideo() ? MediaType.VIDEO : MediaType.AUDIO);
					sTrack.setMediaBandwidth((float) cManifest.getBandwidth());
					cManifest.getSegmentInfoTrie().values().forEach((segment) -> segmentSizes.add(segment.getSize()));
					cManifest.getSegmentInfoTrie().values().forEach((segment) -> segmentDurations.add(segment.getDuration()));
					sTrack.setSegmentSizes(segmentSizes);
					sTrack.setSegmentDurations(segmentDurations);
					tracks.add(sTrack);
				}
			} catch (IOException ioe) {
				LOGGER.error("Exception while parsing MPD for CSI", ioe);
			}
			videoManifest.setTracks(tracks);
			// BUGFIX: break was missing, so control fell through into the
			// "m3u8" case and ran the HLS parser against a DASH manifest.
			break;
		case "m3u8":
			try {
				videoManifest = hlsManifestParseImpl.getManifest(videoManifest, manifestFile);
			} catch (Exception e) {
				LOGGER.error(e.getMessage());
			}
			break;
		default:
			break;
		}
	}
	List<Segment> candidateList = prepareCandidates(requestMap, videoManifest);
	Map<Integer, List<Segment>> edgeDistanceMap = calculateDistancesAndPopulateAdjacencyList(candidateList, requestMap);
	TreeMap<Integer, List<Integer>> lastNodes = findShortestPath(candidateList, edgeDistanceMap);
	if (!lastNodes.isEmpty()) {
		VideoStream videoStream = new VideoStream();
		videoStream.setManifest(createManifest(FilenameUtils.getBaseName(manifestFile.getPath()), ManifestType.MASTER, ContentType.MUXED));
		streamingVideoData.addVideoStream(firstRequestTimeStamp, videoStream);
		List<Segment> solution = getLikelySequences(candidateList, edgeDistanceMap, lastNodes);
		Manifest manifest;
		if (!solution.isEmpty()) {
			// One child manifest + video event per inferred segment
			for (Segment segment : solution) {
				manifest = createManifest(FilenameUtils.getBaseName(manifestFile.getPath()), ManifestType.CHILD, ContentType.VIDEO);
				ChildManifest childManifest = new ChildManifest();
				childManifest.setManifest(manifest);
				VideoEvent videoEvent = new VideoEvent(getDefaultThumbnail(), manifest, segment, requestMap.get(segment.getRequestKey()));
				videoRequestMap.add(requestMap.get(segment.getRequestKey()));
				videoEvent.setChildManifest(childManifest);
				videoStream.addVideoEvent(videoEvent);
			}
			int segmentIndex = 0;
			// BUGFIX: the original used findFirst().get() followed by a null
			// check — get() never returns null and throws NoSuchElementException
			// when no audio track exists; orElse(null) makes the guard real.
			Track audioTrack = videoManifest.getTracks().stream()
					.filter(track -> MediaType.AUDIO.equals(track.getMediaType()))
					.findFirst().orElse(null);
			if (audioTrack != null) {
				for (HttpRequestResponseInfo rrInfo : possibleAudioRequestMap.values()) {
					// Only requests not already claimed as video, arriving after
					// the first video segment's download time
					if (!videoRequestMap.contains(rrInfo) && rrInfo.getTime() > videoStream.getFirstSegment().getDLTime()) {
						Segment audioSegment = new Segment(videoManifest, videoManifest.getAudioTrack(), ++segmentIndex, audioTrack.getSegmentSizes().get(segmentIndex - 1), rrInfo.getKey(), rrInfo.getRequestCounterCSI(), -1);
						manifest = createManifest(FilenameUtils.getBaseName(manifestFile.getPath()), ManifestType.CHILD, ContentType.AUDIO);
						ChildManifest childManifest = new ChildManifest();
						childManifest.setManifest(manifest);
						VideoEvent videoEvent = new VideoEvent(getDefaultThumbnail(), manifest, audioSegment, rrInfo);
						videoEvent.setChildManifest(childManifest);
						videoStream.addVideoEvent(videoEvent);
					}
				}
			}
		}
	}
	if (flag) {
		saveCSIManifestAndState(manifestFile.toString());
	}
	videoSegmentAnalyzer.process(result, streamingVideoData);
	return streamingVideoData;
}
Aggregations